code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
---|---|---|---|
import json
import requests
import time
class TRY():
    # Shared, class-level history of every Turkish-lira rate recorded so far.
    rates = []

    def __init__(self, r):
        """Append one observed TRY rate to the shared history."""
        TRY.rates.append(r)

    def ls(self):
        """Print the accumulated TRY rate history."""
        message = f"TRY: {TRY.rates}"
        print(message)
class USD():
    # Class-level list: all US-dollar rates observed across instances.
    rates = []

    def __init__(self, r):
        """Record a single USD rate in the shared history."""
        USD.rates.append(r)

    def ls(self):
        """Print every USD rate collected so far."""
        message = f"USD: {USD.rates}"
        print(message)
class RUB():
    # Running history of Russian-ruble rates, shared by all instances.
    rates = []

    def __init__(self, r):
        """Store one RUB rate in the class-wide history."""
        RUB.rates.append(r)

    def ls(self):
        """Print the RUB rate history accumulated so far."""
        message = f"RUB: {RUB.rates}"
        print(message)
class INR():
    # Shared record of every Indian-rupee rate seen so far.
    rates = []

    def __init__(self, r):
        """Append one INR rate to the shared record."""
        INR.rates.append(r)

    def ls(self):
        """Print all INR rates collected to date."""
        message = f"INR: {INR.rates}"
        print(message)
class Factory():
    """Builds the rate tracker that corresponds to a currency code."""

    def getExchange(self, currency, rates):
        """Return a new tracker for `currency` seeded with `rates`.

        Unknown currency codes yield None, exactly like the original
        if/elif chain did.
        """
        trackers = {
            "TRY": TRY,  # Turkish lira
            "USD": USD,  # US dollar
            "RUB": RUB,  # Russian ruble
            "INR": INR,  # Indian rupee
        }
        tracker_cls = trackers.get(currency)
        if tracker_cls is None:
            return None
        return tracker_cls(rates)
def main(urlAPI):
    """Fetch the latest exchange rates from `urlAPI` and log tracked currencies.

    On a non-OK response the status flag is printed and nothing else happens.
    """
    response = requests.get(urlAPI)
    if response.ok is not True:
        print(response.ok)
        return

    # The API answers with a JSON document whose 'rates' key maps
    # currency codes to their current rates.
    rates_by_code = json.loads(response.text)['rates']
    factory = Factory()

    for code in rates_by_code:
        exchange = factory.getExchange(code, rates_by_code[code])
        # After each recorded rate, dump the accumulated history of every
        # tracked currency (ls ignores its argument; this mirrors the
        # original reporting behaviour exactly).
        TRY.ls(exchange)
        USD.ls(exchange)
        RUB.ls(exchange)
        INR.ls(exchange)
if __name__ == '__main__':
    # Poll the rates API three times in a row (the original per-iteration
    # sleep was disabled and is kept disabled here).
    for _ in range(3):
        main("https://api.exchangeratesapi.io/latest")
|
normal
|
{
"blob_id": "d56aa0f0b7c420e4021736cf8f80923121856d1c",
"index": 1286,
"step-1": "<mask token>\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass USD:\n rates = list()\n\n def __init__(self, r):\n USD.rates.append(r)\n\n def ls(self):\n print(f'USD: {USD.rates}')\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TRY:\n <mask token>\n\n def __init__(self, r):\n TRY.rates.append(r)\n <mask token>\n\n\nclass USD:\n rates = list()\n\n def __init__(self, r):\n USD.rates.append(r)\n\n def ls(self):\n print(f'USD: {USD.rates}')\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TRY:\n rates = list()\n\n def __init__(self, r):\n TRY.rates.append(r)\n\n def ls(self):\n print(f'TRY: {TRY.rates}')\n\n\nclass USD:\n rates = list()\n\n def __init__(self, r):\n USD.rates.append(r)\n\n def ls(self):\n print(f'USD: {USD.rates}')\n\n\nclass RUB:\n rates = list()\n\n def __init__(self, r):\n RUB.rates.append(r)\n\n def ls(self):\n print(f'RUB: {RUB.rates}')\n\n\nclass INR:\n rates = list()\n\n def __init__(self, r):\n INR.rates.append(r)\n\n def ls(self):\n print(f'INR: {INR.rates}')\n\n\nclass Factory:\n\n def getExchange(self, currency, rates):\n if currency == 'TRY':\n return TRY(rates)\n elif currency == 'USD':\n return USD(rates)\n elif currency == 'RUB':\n return RUB(rates)\n elif currency == 'INR':\n return INR(rates)\n else:\n return None\n\n\ndef main(urlAPI):\n resp = requests.get(urlAPI)\n if resp.ok is True:\n data = resp.text\n jsonData = json.loads(data)\n parsedData = jsonData['rates']\n factory = Factory()\n for c in parsedData:\n f = factory.getExchange(c, parsedData[c])\n TRY.ls(f)\n USD.ls(f)\n RUB.ls(f)\n INR.ls(f)\n else:\n print(resp.ok)\n\n\n<mask token>\n",
"step-5": "import json\nimport requests\nimport time\n\n\nclass TRY():\n rates = list()\n\n def __init__(self, r):\n # if(TRY.rates[-1] != r):\n TRY.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"TRY: {TRY.rates}\")\n\n\nclass USD():\n rates = list()\n\n def __init__(self, r):\n # if(USD.rates[-1] != r):\n USD.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"USD: {USD.rates}\")\n\n\nclass RUB():\n rates = list()\n\n def __init__(self, r):\n # if(RUB.rates[-1] != r):\n RUB.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"RUB: {RUB.rates}\")\n\n\nclass INR():\n rates = list()\n\n def __init__(self, r):\n # if(INR.rates[-1] != r):\n INR.rates.append(r)\n\n def ls(self):\n # print(\"TRY: \"+TRY.rates[e] for e in range(1, len(TRY.rates)))\n print(f\"INR: {INR.rates}\")\n\n\nclass Factory():\n def getExchange(self, currency, rates):\n if currency == \"TRY\":\n return TRY(rates)\n elif currency == \"USD\":\n return USD(rates) # abd doları\n elif currency == \"RUB\":\n return RUB(rates) # rusya rublesi\n elif currency == \"INR\":\n return INR(rates) # hindistan rupisi\n else:\n return None\n\n\ndef main(urlAPI):\n resp = requests.get(urlAPI)\n if(resp.ok is True):\n\n # print(resp.ok)\n data = resp.text\n jsonData = json.loads(data)\n parsedData = jsonData['rates']\n\n factory = Factory()\n # print(parsedData)\n\n for c in parsedData:\n f = factory.getExchange(c, parsedData[c])\n\n TRY.ls(f)\n USD.ls(f)\n RUB.ls(f)\n INR.ls(f)\n else:\n print(resp.ok)\n\n\nif __name__ == '__main__':\n for i in range(3):\n # time.sleep(10)\n main(\"https://api.exchangeratesapi.io/latest\")\n",
"step-ids": [
10,
14,
16,
19,
22
]
}
|
[
10,
14,
16,
19,
22
] |
"""Utils module."""
import click
import os.path
import pandas as pd
from tensorflow.keras.models import load_model
from tensorflow.keras.regularizers import l1_l2
from tensorflow.keras.callbacks import CSVLogger, ModelCheckpoint, TensorBoard
from zalando_classification.models import build_model
def get_basename(name, split_num):
    """Return the canonical '<name>.split<N>' identifier for one CV split."""
    return f"{name}.split{split_num:d}"
def get_model_filename_fmt(basename):
    """Checkpoint filename template with a zero-padded `{epoch}` placeholder."""
    return f"{basename}.{{epoch:02d}}.h5"
def maybe_load_model(name, split_num, checkpoint_dir, resume_from_epoch,
                     batch_norm, l1_factor, l2_factor, optimizer):
    """
    Attempt to load the specified model (including architecture, weights, and
    even optimizer states). If this is not possible, build a new model from
    scratch.

    Returns a ``(model, initial_epoch)`` tuple; ``initial_epoch`` is the epoch
    training should resume from (0 for a freshly built model).
    """
    basename = get_basename(name, split_num)
    model_filename_fmt = get_model_filename_fmt(basename)
    # Concrete checkpoint filename for the epoch we were asked to resume from.
    model_filename = model_filename_fmt.format(epoch=resume_from_epoch)
    checkpoint_path = os.path.join(checkpoint_dir, model_filename)
    # Only resume when a positive epoch was requested AND its checkpoint exists.
    if resume_from_epoch > 0 and os.path.isfile(checkpoint_path):
        click.secho(f"Found model checkpoint '{checkpoint_path}'. "
                    f"Resuming from epoch {resume_from_epoch}.", fg='green')
        # load_model restores architecture, weights and optimizer state, so no
        # recompilation is needed on this branch.
        model = load_model(checkpoint_path)
        initial_epoch = resume_from_epoch
    else:
        click.secho(f"Could not load model checkpoint '{checkpoint_path}' "
                    "or `resume_from_epoch == 0`. Building new model.",
                    fg='yellow')
        # Fresh model: single output unit, optional batch norm, L1/L2 weight
        # regularization controlled by the caller.
        model = build_model(output_dim=1, batch_norm=batch_norm,
                            kernel_regularizer=l1_l2(l1_factor, l2_factor))
        # optimizer = Adam(beta_1=0.5)
        model.compile(loss='binary_crossentropy', optimizer=optimizer,
                      metrics=['accuracy'])
        initial_epoch = 0
    return model, initial_epoch
def build_callbacks(name, split_num, summary_dir, checkpoint_dir,
                    checkpoint_period):
    """Assemble the Keras callbacks for one training run.

    Returns, in order: a TensorBoard writer, a CSV epoch logger (appending),
    and a periodic model checkpointer.
    """
    basename = get_basename(name, split_num)
    filename_fmt = get_model_filename_fmt(basename)

    return [
        TensorBoard(os.path.join(summary_dir, basename), profile_batch=0),
        CSVLogger(os.path.join(summary_dir, f"{basename}.csv"), append=True),
        ModelCheckpoint(os.path.join(checkpoint_dir, filename_fmt),
                        period=checkpoint_period),
    ]
def make_plot_data(names, splits, summary_dir, pretty_name_mapping=None):
    """Load per-split training CSV logs and reshape them for plotting.

    Reads `<basename>.csv` for every (name, split) pair, concatenates them,
    renames the accuracy columns, optionally prettifies run names, and melts
    the result into long format with a 'partition' column.
    """
    frames = [
        pd.read_csv(os.path.join(summary_dir,
                                 f"{get_basename(run_name, split_num)}.csv"))
          .assign(name=run_name, split=split_num)
        for run_name in names
        for split_num in splits
    ]

    data = (pd.concat(frames, axis="index", sort=True)
              .rename(columns=dict(acc="train", val_acc="validation")))

    if pretty_name_mapping is not None:
        # Replace raw run identifiers with display-friendly labels.
        data = data.assign(name=data.name.replace(pretty_name_mapping))

    return pd.melt(data, id_vars=["name", "split", "epoch"],
                   value_vars=["train", "validation"],
                   value_name="accuracy", var_name="partition")
|
normal
|
{
"blob_id": "6553312c9655c821444ff5f60e4d68c7fc08bd08",
"index": 1118,
"step-1": "<mask token>\n\n\ndef get_basename(name, split_num):\n return f'{name}.split{split_num:d}'\n\n\n<mask token>\n\n\ndef maybe_load_model(name, split_num, checkpoint_dir, resume_from_epoch,\n batch_norm, l1_factor, l2_factor, optimizer):\n \"\"\"\n Attempt to load the specified model (including architecture, weights, and\n even optimizer states). If this is not possible, build a new model from\n scratch.\n \"\"\"\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n model_filename = model_filename_fmt.format(epoch=resume_from_epoch)\n checkpoint_path = os.path.join(checkpoint_dir, model_filename)\n if resume_from_epoch > 0 and os.path.isfile(checkpoint_path):\n click.secho(\n f\"Found model checkpoint '{checkpoint_path}'. Resuming from epoch {resume_from_epoch}.\"\n , fg='green')\n model = load_model(checkpoint_path)\n initial_epoch = resume_from_epoch\n else:\n click.secho(\n f\"Could not load model checkpoint '{checkpoint_path}' or `resume_from_epoch == 0`. Building new model.\"\n , fg='yellow')\n model = build_model(output_dim=1, batch_norm=batch_norm,\n kernel_regularizer=l1_l2(l1_factor, l2_factor))\n model.compile(loss='binary_crossentropy', optimizer=optimizer,\n metrics=['accuracy'])\n initial_epoch = 0\n return model, initial_epoch\n\n\ndef build_callbacks(name, split_num, summary_dir, checkpoint_dir,\n checkpoint_period):\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n tensorboard_path = os.path.join(summary_dir, basename)\n csv_path = os.path.join(summary_dir, f'{basename}.csv')\n checkpoint_path = os.path.join(checkpoint_dir, model_filename_fmt)\n callbacks = []\n callbacks.append(TensorBoard(tensorboard_path, profile_batch=0))\n callbacks.append(CSVLogger(csv_path, append=True))\n callbacks.append(ModelCheckpoint(checkpoint_path, period=checkpoint_period)\n )\n return callbacks\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_basename(name, split_num):\n return f'{name}.split{split_num:d}'\n\n\ndef get_model_filename_fmt(basename):\n return f'{basename}.{{epoch:02d}}.h5'\n\n\ndef maybe_load_model(name, split_num, checkpoint_dir, resume_from_epoch,\n batch_norm, l1_factor, l2_factor, optimizer):\n \"\"\"\n Attempt to load the specified model (including architecture, weights, and\n even optimizer states). If this is not possible, build a new model from\n scratch.\n \"\"\"\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n model_filename = model_filename_fmt.format(epoch=resume_from_epoch)\n checkpoint_path = os.path.join(checkpoint_dir, model_filename)\n if resume_from_epoch > 0 and os.path.isfile(checkpoint_path):\n click.secho(\n f\"Found model checkpoint '{checkpoint_path}'. Resuming from epoch {resume_from_epoch}.\"\n , fg='green')\n model = load_model(checkpoint_path)\n initial_epoch = resume_from_epoch\n else:\n click.secho(\n f\"Could not load model checkpoint '{checkpoint_path}' or `resume_from_epoch == 0`. 
Building new model.\"\n , fg='yellow')\n model = build_model(output_dim=1, batch_norm=batch_norm,\n kernel_regularizer=l1_l2(l1_factor, l2_factor))\n model.compile(loss='binary_crossentropy', optimizer=optimizer,\n metrics=['accuracy'])\n initial_epoch = 0\n return model, initial_epoch\n\n\ndef build_callbacks(name, split_num, summary_dir, checkpoint_dir,\n checkpoint_period):\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n tensorboard_path = os.path.join(summary_dir, basename)\n csv_path = os.path.join(summary_dir, f'{basename}.csv')\n checkpoint_path = os.path.join(checkpoint_dir, model_filename_fmt)\n callbacks = []\n callbacks.append(TensorBoard(tensorboard_path, profile_batch=0))\n callbacks.append(CSVLogger(csv_path, append=True))\n callbacks.append(ModelCheckpoint(checkpoint_path, period=checkpoint_period)\n )\n return callbacks\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_basename(name, split_num):\n return f'{name}.split{split_num:d}'\n\n\ndef get_model_filename_fmt(basename):\n return f'{basename}.{{epoch:02d}}.h5'\n\n\ndef maybe_load_model(name, split_num, checkpoint_dir, resume_from_epoch,\n batch_norm, l1_factor, l2_factor, optimizer):\n \"\"\"\n Attempt to load the specified model (including architecture, weights, and\n even optimizer states). If this is not possible, build a new model from\n scratch.\n \"\"\"\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n model_filename = model_filename_fmt.format(epoch=resume_from_epoch)\n checkpoint_path = os.path.join(checkpoint_dir, model_filename)\n if resume_from_epoch > 0 and os.path.isfile(checkpoint_path):\n click.secho(\n f\"Found model checkpoint '{checkpoint_path}'. Resuming from epoch {resume_from_epoch}.\"\n , fg='green')\n model = load_model(checkpoint_path)\n initial_epoch = resume_from_epoch\n else:\n click.secho(\n f\"Could not load model checkpoint '{checkpoint_path}' or `resume_from_epoch == 0`. 
Building new model.\"\n , fg='yellow')\n model = build_model(output_dim=1, batch_norm=batch_norm,\n kernel_regularizer=l1_l2(l1_factor, l2_factor))\n model.compile(loss='binary_crossentropy', optimizer=optimizer,\n metrics=['accuracy'])\n initial_epoch = 0\n return model, initial_epoch\n\n\ndef build_callbacks(name, split_num, summary_dir, checkpoint_dir,\n checkpoint_period):\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n tensorboard_path = os.path.join(summary_dir, basename)\n csv_path = os.path.join(summary_dir, f'{basename}.csv')\n checkpoint_path = os.path.join(checkpoint_dir, model_filename_fmt)\n callbacks = []\n callbacks.append(TensorBoard(tensorboard_path, profile_batch=0))\n callbacks.append(CSVLogger(csv_path, append=True))\n callbacks.append(ModelCheckpoint(checkpoint_path, period=checkpoint_period)\n )\n return callbacks\n\n\ndef make_plot_data(names, splits, summary_dir, pretty_name_mapping=None):\n df_list = []\n for name in names:\n for split_num in splits:\n basename = get_basename(name, split_num)\n csv_path = os.path.join(summary_dir, f'{basename}.csv')\n df = pd.read_csv(csv_path).assign(name=name, split=split_num)\n df_list.append(df)\n data = pd.concat(df_list, axis='index', sort=True).rename(columns=dict(\n acc='train', val_acc='validation'))\n if pretty_name_mapping is not None:\n data = data.assign(name=data.name.replace(pretty_name_mapping))\n wide_data = pd.melt(data, id_vars=['name', 'split', 'epoch'],\n value_vars=['train', 'validation'], value_name='accuracy', var_name\n ='partition')\n return wide_data\n",
"step-4": "<mask token>\nimport click\nimport os.path\nimport pandas as pd\nfrom tensorflow.keras.models import load_model\nfrom tensorflow.keras.regularizers import l1_l2\nfrom tensorflow.keras.callbacks import CSVLogger, ModelCheckpoint, TensorBoard\nfrom zalando_classification.models import build_model\n\n\ndef get_basename(name, split_num):\n return f'{name}.split{split_num:d}'\n\n\ndef get_model_filename_fmt(basename):\n return f'{basename}.{{epoch:02d}}.h5'\n\n\ndef maybe_load_model(name, split_num, checkpoint_dir, resume_from_epoch,\n batch_norm, l1_factor, l2_factor, optimizer):\n \"\"\"\n Attempt to load the specified model (including architecture, weights, and\n even optimizer states). If this is not possible, build a new model from\n scratch.\n \"\"\"\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n model_filename = model_filename_fmt.format(epoch=resume_from_epoch)\n checkpoint_path = os.path.join(checkpoint_dir, model_filename)\n if resume_from_epoch > 0 and os.path.isfile(checkpoint_path):\n click.secho(\n f\"Found model checkpoint '{checkpoint_path}'. Resuming from epoch {resume_from_epoch}.\"\n , fg='green')\n model = load_model(checkpoint_path)\n initial_epoch = resume_from_epoch\n else:\n click.secho(\n f\"Could not load model checkpoint '{checkpoint_path}' or `resume_from_epoch == 0`. 
Building new model.\"\n , fg='yellow')\n model = build_model(output_dim=1, batch_norm=batch_norm,\n kernel_regularizer=l1_l2(l1_factor, l2_factor))\n model.compile(loss='binary_crossentropy', optimizer=optimizer,\n metrics=['accuracy'])\n initial_epoch = 0\n return model, initial_epoch\n\n\ndef build_callbacks(name, split_num, summary_dir, checkpoint_dir,\n checkpoint_period):\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n tensorboard_path = os.path.join(summary_dir, basename)\n csv_path = os.path.join(summary_dir, f'{basename}.csv')\n checkpoint_path = os.path.join(checkpoint_dir, model_filename_fmt)\n callbacks = []\n callbacks.append(TensorBoard(tensorboard_path, profile_batch=0))\n callbacks.append(CSVLogger(csv_path, append=True))\n callbacks.append(ModelCheckpoint(checkpoint_path, period=checkpoint_period)\n )\n return callbacks\n\n\ndef make_plot_data(names, splits, summary_dir, pretty_name_mapping=None):\n df_list = []\n for name in names:\n for split_num in splits:\n basename = get_basename(name, split_num)\n csv_path = os.path.join(summary_dir, f'{basename}.csv')\n df = pd.read_csv(csv_path).assign(name=name, split=split_num)\n df_list.append(df)\n data = pd.concat(df_list, axis='index', sort=True).rename(columns=dict(\n acc='train', val_acc='validation'))\n if pretty_name_mapping is not None:\n data = data.assign(name=data.name.replace(pretty_name_mapping))\n wide_data = pd.melt(data, id_vars=['name', 'split', 'epoch'],\n value_vars=['train', 'validation'], value_name='accuracy', var_name\n ='partition')\n return wide_data\n",
"step-5": "\"\"\"Utils module.\"\"\"\nimport click\nimport os.path\n\nimport pandas as pd\n\nfrom tensorflow.keras.models import load_model\nfrom tensorflow.keras.regularizers import l1_l2\nfrom tensorflow.keras.callbacks import CSVLogger, ModelCheckpoint, TensorBoard\n\nfrom zalando_classification.models import build_model\n\n\ndef get_basename(name, split_num):\n\n return f\"{name}.split{split_num:d}\"\n\n\ndef get_model_filename_fmt(basename):\n\n return f\"{basename}.{{epoch:02d}}.h5\"\n\n\ndef maybe_load_model(name, split_num, checkpoint_dir, resume_from_epoch,\n batch_norm, l1_factor, l2_factor, optimizer):\n \"\"\"\n Attempt to load the specified model (including architecture, weights, and\n even optimizer states). If this is not possible, build a new model from\n scratch.\n \"\"\"\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n model_filename = model_filename_fmt.format(epoch=resume_from_epoch)\n\n checkpoint_path = os.path.join(checkpoint_dir, model_filename)\n\n if resume_from_epoch > 0 and os.path.isfile(checkpoint_path):\n\n click.secho(f\"Found model checkpoint '{checkpoint_path}'. \"\n f\"Resuming from epoch {resume_from_epoch}.\", fg='green')\n\n model = load_model(checkpoint_path)\n\n initial_epoch = resume_from_epoch\n\n else:\n\n click.secho(f\"Could not load model checkpoint '{checkpoint_path}' \"\n \"or `resume_from_epoch == 0`. 
Building new model.\",\n fg='yellow')\n\n model = build_model(output_dim=1, batch_norm=batch_norm,\n kernel_regularizer=l1_l2(l1_factor, l2_factor))\n # optimizer = Adam(beta_1=0.5)\n model.compile(loss='binary_crossentropy', optimizer=optimizer,\n metrics=['accuracy'])\n\n initial_epoch = 0\n\n return model, initial_epoch\n\n\ndef build_callbacks(name, split_num, summary_dir, checkpoint_dir,\n checkpoint_period):\n\n basename = get_basename(name, split_num)\n model_filename_fmt = get_model_filename_fmt(basename)\n\n tensorboard_path = os.path.join(summary_dir, basename)\n csv_path = os.path.join(summary_dir, f\"{basename}.csv\")\n checkpoint_path = os.path.join(checkpoint_dir, model_filename_fmt)\n\n callbacks = []\n callbacks.append(TensorBoard(tensorboard_path, profile_batch=0))\n callbacks.append(CSVLogger(csv_path, append=True))\n callbacks.append(ModelCheckpoint(checkpoint_path, period=checkpoint_period))\n\n return callbacks\n\n\ndef make_plot_data(names, splits, summary_dir, pretty_name_mapping=None):\n\n df_list = []\n\n for name in names:\n for split_num in splits:\n\n basename = get_basename(name, split_num)\n csv_path = os.path.join(summary_dir, f\"{basename}.csv\")\n\n df = pd.read_csv(csv_path).assign(name=name, split=split_num)\n df_list.append(df)\n\n data = pd.concat(df_list, axis=\"index\", sort=True) \\\n .rename(columns=dict(acc=\"train\", val_acc=\"validation\"))\n\n if pretty_name_mapping is not None:\n data = data.assign(name=data.name.replace(pretty_name_mapping))\n\n wide_data = pd.melt(data, id_vars=[\"name\", \"split\", \"epoch\"],\n value_vars=[\"train\", \"validation\"],\n value_name=\"accuracy\", var_name=\"partition\")\n\n return wide_data\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class PairMatcherTestCase(TestCase):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PairMatcherTestCase(TestCase):
<|reserved_special_token_0|>
def test_simple(self):
employees = EmployeeFactory.create_batch(41, company=self.company)
matcher = MaximumWeightGraphMatcher()
groups = matcher.match(self.company, employees)
print('\n'.join([','.join(e.user.username for e in group) for group in
groups]))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PairMatcherTestCase(TestCase):
def setUp(self):
self.company = CompanyFactory.create()
def test_simple(self):
employees = EmployeeFactory.create_batch(41, company=self.company)
matcher = MaximumWeightGraphMatcher()
groups = matcher.match(self.company, employees)
print('\n'.join([','.join(e.user.username for e in group) for group in
groups]))
<|reserved_special_token_1|>
from django.test import TestCase
from core.factories import CompanyFactory, EmployeeFactory
from core.pair_matcher import MaximumWeightGraphMatcher
class PairMatcherTestCase(TestCase):
    """Exercises MaximumWeightGraphMatcher against a factory-built company."""

    def setUp(self):
        # One company shared by every test in this case.
        self.company = CompanyFactory.create()

    def test_simple(self):
        # 41 employees: an odd count, so the matcher must also cope with a
        # group that cannot be an exact pair.
        employees = EmployeeFactory.create_batch(41, company=self.company)
        matcher = MaximumWeightGraphMatcher()
        groups = matcher.match(self.company, employees)
        lines = [','.join(e.user.username for e in group) for group in groups]
        print('\n'.join(lines))
|
flexible
|
{
"blob_id": "0c68bd65cac3c8b9fd080900a00991b2d19260ee",
"index": 534,
"step-1": "<mask token>\n\n\nclass PairMatcherTestCase(TestCase):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass PairMatcherTestCase(TestCase):\n <mask token>\n\n def test_simple(self):\n employees = EmployeeFactory.create_batch(41, company=self.company)\n matcher = MaximumWeightGraphMatcher()\n groups = matcher.match(self.company, employees)\n print('\\n'.join([','.join(e.user.username for e in group) for group in\n groups]))\n",
"step-3": "<mask token>\n\n\nclass PairMatcherTestCase(TestCase):\n\n def setUp(self):\n self.company = CompanyFactory.create()\n\n def test_simple(self):\n employees = EmployeeFactory.create_batch(41, company=self.company)\n matcher = MaximumWeightGraphMatcher()\n groups = matcher.match(self.company, employees)\n print('\\n'.join([','.join(e.user.username for e in group) for group in\n groups]))\n",
"step-4": "from django.test import TestCase\nfrom core.factories import CompanyFactory, EmployeeFactory\nfrom core.pair_matcher import MaximumWeightGraphMatcher\n\n\nclass PairMatcherTestCase(TestCase):\n\n def setUp(self):\n self.company = CompanyFactory.create()\n\n def test_simple(self):\n employees = EmployeeFactory.create_batch(41, company=self.company)\n matcher = MaximumWeightGraphMatcher()\n groups = matcher.match(self.company, employees)\n print('\\n'.join([','.join(e.user.username for e in group) for group in\n groups]))\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
import copy
from urllib.parse import urlparse

import tornado
import tornado.httpclient
import tornado.ioloop
import tornado.web
class DjangoHandler(tornado.web.RequestHandler):
    """Proxies incoming HTTP requests to the Django backend on localhost:9000."""

    async def reroute(self):
        """Re-issue the current request against the Django server and await its response."""
        http = tornado.httpclient.AsyncHTTPClient()

        new_request = copy.deepcopy(self.request)
        # BUG FIX: `urlparse` lives in `urllib.parse`, not in the `copy`
        # module — `copy.urlparse(...)` raised AttributeError on every request.
        url_obj = urlparse(new_request.url)
        # Keep the original path but point scheme://host at the Django port.
        # NOTE(review): `self.request` is a tornado server-side request object;
        # confirm it exposes `.url` and is accepted by `AsyncHTTPClient.fetch`
        # under the tornado version in use.
        new_request.url = f"{url_obj.scheme}://localhost:9000{url_obj.path}"

        return await http.fetch(new_request)

    # GET and POST are both transparently proxied through the same coroutine.
    get = reroute
    post = reroute
# Single catch-all route: everything at the root path goes through the
# Django pass-through handler.
application = tornado.web.Application([
    (r'/', DjangoHandler),
])

# Serve on the standard HTTP port and hand control to the tornado event loop.
application.listen(80)
tornado.ioloop.IOLoop.current().start()
|
normal
|
{
"blob_id": "6960fc6d949512ffc783b085041f86cb791160a3",
"index": 1500,
"step-1": "<mask token>\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n get = reroute\n post = reroute\n\n\n<mask token>\napplication.listen(80)\ntornado.ioloop.IOLoop.current().start()\n",
"step-3": "<mask token>\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n get = reroute\n post = reroute\n\n\napplication = tornado.web.Application([('/', DjangoHandler)])\napplication.listen(80)\ntornado.ioloop.IOLoop.current().start()\n",
"step-4": "import tornado\nimport copy\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f'{url_obj.scheme}://localhost:9000{url_obj.path}'\n return await http.fetch(new_request)\n get = reroute\n post = reroute\n\n\napplication = tornado.web.Application([('/', DjangoHandler)])\napplication.listen(80)\ntornado.ioloop.IOLoop.current().start()\n",
"step-5": "import tornado\nimport copy\n\n\nclass DjangoHandler(tornado.web.RequestHandler):\n async def reroute(self):\n http = tornado.httpclient.AsyncHTTPClient()\n\n new_request = copy.deepcopy(self.request)\n url_obj = copy.urlparse(new_request.url)\n new_request.url = f\"{url_obj.scheme}://localhost:9000{url_obj.path}\"\n\n return await http.fetch(new_request)\n\n get = reroute\n post = reroute\n\n\napplication = tornado.web.Application([\n # (r'/chat', WebsocketChatHandler),\n (r'/', DjangoHandler),\n])\napplication.listen(80)\n\ntornado.ioloop.IOLoop.current().start()\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#!/usr/bin/env python3
import datetime
import time
import board
from busio import I2C
import adafruit_bme680
# Create library object using our Bus I2C port.
i2c = I2C(board.SCL, board.SDA)
bme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)

# Change this to match the location's pressure (hPa) at sea level so the
# altitude reading is calibrated correctly.
bme680.sea_level_pressure = 1006.0

# BUG FIX: the file was opened but never flushed or closed, so buffered
# sensor data was lost whenever the endless loop was killed. The context
# manager guarantees the handle is closed, and each sample is flushed.
with open("/home/pi/Payload/src/sensory/burning_data.txt", "a") as file:
    # Sample the sensor once a second, echoing each reading to stdout and
    # appending it to the log file.
    while True:
        ts = time.time()
        st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
        file.write("\ntimestamp: %s " % st)
        print("\ntimestamp: %s " % st)
        print("Temperature: %0.1f C" % bme680.temperature)
        file.write("Temperature: %0.1f C" % bme680.temperature)
        print("Gas: %d ohm" % bme680.gas)
        file.write("Gas: %d ohm" % bme680.gas)
        print("Humidity: %0.1f %%" % bme680.humidity)
        file.write("Humidity: %0.1f %%" % bme680.humidity)
        print("Pressure: %0.3f hPa" % bme680.pressure)
        file.write("Pressure: %0.3f hPa" % bme680.pressure)
        print("Altitude = %0.2f meters" % bme680.altitude)
        file.write("Altitude = %0.2f meters" % bme680.altitude)
        # Push this sample to disk before sleeping so data survives a kill.
        file.flush()
        time.sleep(1)
|
normal
|
{
"blob_id": "ae7fc034249b7dde6d6bca33e2e6c8f464284cfc",
"index": 9718,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write('\\ntimestamp: %s ' % st)\n print('\\ntimestamp: %s ' % st)\n print('Temperature: %0.1f C' % bme680.temperature)\n file.write('Temperature: %0.1f C' % bme680.temperature)\n print('Gas: %d ohm' % bme680.gas)\n file.write('Gas: %d ohm' % bme680.gas)\n print('Humidity: %0.1f %%' % bme680.humidity)\n file.write('Humidity: %0.1f %%' % bme680.humidity)\n print('Pressure: %0.3f hPa' % bme680.pressure)\n file.write('Pressure: %0.3f hPa' % bme680.pressure)\n print('Altitude = %0.2f meters' % bme680.altitude)\n file.write('Altitude = %0.2f meters' % bme680.altitude)\n time.sleep(1)\n",
"step-3": "<mask token>\ni2c = I2C(board.SCL, board.SDA)\nbme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)\nbme680.sea_level_pressure = 1006.0\nfile = open('/home/pi/Payload/src/sensory/burning_data.txt', 'a')\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write('\\ntimestamp: %s ' % st)\n print('\\ntimestamp: %s ' % st)\n print('Temperature: %0.1f C' % bme680.temperature)\n file.write('Temperature: %0.1f C' % bme680.temperature)\n print('Gas: %d ohm' % bme680.gas)\n file.write('Gas: %d ohm' % bme680.gas)\n print('Humidity: %0.1f %%' % bme680.humidity)\n file.write('Humidity: %0.1f %%' % bme680.humidity)\n print('Pressure: %0.3f hPa' % bme680.pressure)\n file.write('Pressure: %0.3f hPa' % bme680.pressure)\n print('Altitude = %0.2f meters' % bme680.altitude)\n file.write('Altitude = %0.2f meters' % bme680.altitude)\n time.sleep(1)\n",
"step-4": "import datetime\nimport time\nimport board\nfrom busio import I2C\nimport adafruit_bme680\ni2c = I2C(board.SCL, board.SDA)\nbme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)\nbme680.sea_level_pressure = 1006.0\nfile = open('/home/pi/Payload/src/sensory/burning_data.txt', 'a')\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write('\\ntimestamp: %s ' % st)\n print('\\ntimestamp: %s ' % st)\n print('Temperature: %0.1f C' % bme680.temperature)\n file.write('Temperature: %0.1f C' % bme680.temperature)\n print('Gas: %d ohm' % bme680.gas)\n file.write('Gas: %d ohm' % bme680.gas)\n print('Humidity: %0.1f %%' % bme680.humidity)\n file.write('Humidity: %0.1f %%' % bme680.humidity)\n print('Pressure: %0.3f hPa' % bme680.pressure)\n file.write('Pressure: %0.3f hPa' % bme680.pressure)\n print('Altitude = %0.2f meters' % bme680.altitude)\n file.write('Altitude = %0.2f meters' % bme680.altitude)\n time.sleep(1)\n",
"step-5": "#!/usr/bin/env python3\nimport datetime\nimport time\nimport board\nfrom busio import I2C\nimport adafruit_bme680\n\n# Create library object using our Bus I2C port\ni2c = I2C(board.SCL, board.SDA)\nbme680 = adafruit_bme680.Adafruit_BME680_I2C(i2c, debug=False)\n\n# change this to match the location's pressure (hPa) at sea level\nbme680.sea_level_pressure = 1006.0\nfile = open(\"/home/pi/Payload/src/sensory/burning_data.txt\",\"a\")\n\nwhile True:\n ts = time.time()\n st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n file.write(\"\\ntimestamp: %s \" % st)\n print(\"\\ntimestamp: %s \" % st)\n print(\"Temperature: %0.1f C\" % bme680.temperature)\n file.write(\"Temperature: %0.1f C\" % bme680.temperature)\n print(\"Gas: %d ohm\" % bme680.gas)\n file.write(\"Gas: %d ohm\" % bme680.gas)\n print(\"Humidity: %0.1f %%\" % bme680.humidity)\n file.write(\"Humidity: %0.1f %%\" % bme680.humidity)\n print(\"Pressure: %0.3f hPa\" % bme680.pressure)\n file.write(\"Pressure: %0.3f hPa\" % bme680.pressure)\n print(\"Altitude = %0.2f meters\" % bme680.altitude)\n file.write(\"Altitude = %0.2f meters\" % bme680.altitude)\n time.sleep(1)\n\n\n#>>> st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')\n#import datetime\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from random import randint
class Game(object):
    """A round-based stick-guessing game ("porrinha").

    Each player starts with 3 sticks.  Every round each player hides
    0..total sticks in hand and guesses the sum of all hidden sticks; a
    correct guess discards one of the guesser's sticks, and the first
    player to reach 0 sticks wins the game.

    player_data maps player name -> [total, on_hand, hunch, is_ai].
    """

    def __init__(self, players):
        """Set up play order and per-player state.

        players: list of player names.  Names 'IA 1' and 'IA 2', when
        present, are controlled by the heuristic AI; everyone else
        guesses randomly.  Raises ValueError for fewer than 2 players.
        """
        if len(players) < 2:
            raise ValueError('Number of player must be at least 2')

        # Copy the list: game_loop() reorders play_order in place, and
        # the caller's list must not be mutated (main() reuses the same
        # list across games).
        self.play_order = list(players)

        self.player_data = {}
        for player in self.play_order:
            # [total, on_hand, hunch, is_ai]
            self.player_data[player] = [3, None, None, False]
        # Flag the AI players only when they are actually in this game;
        # indexing them unconditionally would raise KeyError otherwise.
        for ai_player in ('IA 1', 'IA 2'):
            if ai_player in self.player_data:
                self.player_data[ai_player][3] = True

    def game_loop(self):
        """Play rounds until someone wins; return the winner's name."""
        while not self.won():
            hunches = []

            # Hiding phase: every player hides a random number of sticks
            # (the AI currently hides randomly too).
            for player, data in self.player_data.items():
                print("Jogador: {}".format(player))
                data[1] = randint(0, data[0])
                print("Palitos na mão: {}\n".format(data[1]))

            # Guessing phase, in play order; duplicate guesses are not
            # allowed within a round.
            for player in self.play_order:
                print("Jogador: {}".format(player))
                if self.player_data[player][3]:
                    # Heuristic AI guess.
                    hunch = self.hunch(player, hunches)
                    self.player_data[player][2] = hunch
                else:
                    # Random guess, re-rolled until it is unused.
                    hunch = randint(0, self.max())
                    while hunch in hunches:
                        hunch = randint(0, self.max())
                    self.player_data[player][2] = hunch

                print("Palpite: {}\n".format(hunch))
                hunches.append(hunch)

            winner = self.round_won()
            print("Soma dos palitos: {}".format(self.sum()))

            if winner:
                print("{} ganhou a rodada\n".format(winner))
                self.player_data[winner][0] -= 1
                # The round winner plays first in the next round.
                self.play_order.remove(winner)
                self.play_order.insert(0, winner)
            else:
                print("Ninguém ganhou :(\n")

            print(("-" * 10) + " nova rodada " + ("-" * 10))
            self.reset()

        for player, data in self.player_data.items():
            if data[0] == 0:
                print("{} ganhou o jogo".format(player))
                return player

    def hunch(self, player, hunches):
        """Heuristic guess for an AI player.

        Estimates each earlier player's hidden sticks from their guesses,
        assumes the remaining players hide half of their sticks on
        average, then nudges the result to the nearest unused legal value.
        """
        # The guess starts from this player's own hidden sticks.
        hunch = self.player_data[player][1]
        rand = 0
        sticks = []
        stik = 0
        # Infer the hidden sticks of the players who already guessed.
        for other_player in self.play_order[0:self.play_order.index(player)]:
            # Expected total for the players after other_player.
            average = self.average(self.play_order[self.play_order.index(other_player):len(self.play_order) - 1])

            # Estimated sticks in other_player's hand.
            stik = self.player_data[other_player][2] - average[0]

            # Discount estimates already attributed to earlier players.
            for stick in sticks:
                stik -= stick
            sticks.append(stik)

            # Rounding leftovers feed the expected randomness.
            rand += average[1]
            hunch += stik

        # Expected total for the players who have not guessed yet.
        average = self.average(self.play_order[self.play_order.index(player):len(self.play_order) - 1])

        # An odd total (x.5 average) adds one unit of randomness.
        rand += average[1]

        # Estimated value, plus half of the accumulated randomness.
        hunch += average[0] + rand // 2

        # If the guess is taken or out of range, probe the nearest free
        # value, starting toward the side of the overall average.
        if self.average(self.play_order)[0] > hunch:
            i = 0
            while (hunch in hunches) or (hunch > self.max()) or (hunch < 0):
                i += 1
                if i % 2 == 0:
                    hunch -= i
                else:
                    hunch += i
        else:
            i = 0
            while (hunch in hunches) or (hunch > self.max()) or (hunch < 0):
                i += 1
                if i % 2 == 0:
                    hunch += i
                else:
                    hunch -= i
        return hunch

    def average(self, remaining_players):
        """Expected hidden total for remaining_players.

        Returns [half of their combined stick total, remainder]: each
        player is assumed to hide half their sticks on average.
        """
        result = 0
        for player in remaining_players:
            result += self.player_data[player][0]
        return [result // 2, result % 2]

    def max(self):
        """Largest possible sum of hidden sticks this round."""
        total = 0
        for player in self.play_order:
            total += self.player_data[player][0]
        return total

    def reset(self):
        """Clear hands and hunches for the next round."""
        for player, data in self.player_data.items():
            data[1] = None
            data[2] = None

    def round_won(self):
        """Name of the player whose hunch equals the real sum, or None."""
        sum = self.sum()
        for player, data in self.player_data.items():
            if data[2] == sum:
                return player
        return None

    def won(self):
        """True once any player has discarded all of their sticks."""
        for player, data in self.player_data.items():
            if data[0] == 0:
                return True
        return False

    def sum(self):
        """Actual total of sticks hidden this round."""
        sum = 0
        for player, data in self.player_data.items():
            sum += data[1]
        return sum
if __name__ == '__main__':
    # Run n games with this roster and tally how often each name wins.
    players = ['Rand A', 'Rand B', 'Rand C', 'IA 1', 'IA 2']
    n = 1
    wins = {name: 0 for name in players}

    for _ in range(n):
        winner = Game(players).game_loop()
        if winner:
            wins[winner] += 1

    print("\nRelatório:")
    for name, count in wins.items():
        print("{} ganhou {} vezes".format(name, count))
|
normal
|
{
"blob_id": "52f3000514fd39083daa6316d551f1685c7cea23",
"index": 6792,
"step-1": "<mask token>\n\n\nclass Game(object):\n <mask token>\n\n def game_loop(self):\n while not self.won():\n hunches = []\n for player, data in self.player_data.items():\n print('Jogador: {}'.format(player))\n if data[3]:\n data[1] = randint(0, data[0])\n else:\n data[1] = randint(0, data[0])\n print('Palitos na mão: {}\\n'.format(data[1]))\n for player in self.play_order:\n print('Jogador: {}'.format(player))\n if self.player_data[player][3]:\n hunch = self.hunch(player, hunches)\n self.player_data[player][2] = hunch\n else:\n hunch = randint(0, self.max())\n while hunch in hunches:\n hunch = randint(0, self.max())\n self.player_data[player][2] = hunch\n print('Palpite: {}\\n'.format(hunch))\n hunches.append(hunch)\n winner = self.round_won()\n print('Soma dos palitos: {}'.format(self.sum()))\n if winner:\n print('{} ganhou a rodada\\n'.format(winner))\n self.player_data[winner][0] -= 1\n self.play_order.remove(winner)\n self.play_order.insert(0, winner)\n else:\n print('Ninguém ganhou :(\\n')\n print('-' * 10 + ' nova rodada ' + '-' * 10)\n self.reset()\n for player, data in self.player_data.items():\n if data[0] == 0:\n print('{} ganhou o jogo'.format(player))\n return player\n <mask token>\n\n def average(self, remaining_players):\n result = 0\n for player in remaining_players:\n result += self.player_data[player][0]\n return [result // 2, result % 2]\n\n def max(self):\n total = 0\n for player in self.play_order:\n total += self.player_data[player][0]\n return total\n\n def reset(self):\n for player, data in self.player_data.items():\n data[1] = None\n data[2] = None\n\n def round_won(self):\n sum = self.sum()\n for player, data in self.player_data.items():\n if data[2] == sum:\n return player\n return None\n\n def won(self):\n for player, data in self.player_data.items():\n if data[0] == 0:\n return True\n return False\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Game(object):\n\n def __init__(self, players):\n if len(players) < 2:\n raise ValueError('Number of player must be at least 2')\n self.play_order = players\n self.player_data = {}\n for player in self.play_order:\n self.player_data[player] = [3, None, None, False]\n self.player_data['IA 1'][3] = True\n self.player_data['IA 2'][3] = True\n\n def game_loop(self):\n while not self.won():\n hunches = []\n for player, data in self.player_data.items():\n print('Jogador: {}'.format(player))\n if data[3]:\n data[1] = randint(0, data[0])\n else:\n data[1] = randint(0, data[0])\n print('Palitos na mão: {}\\n'.format(data[1]))\n for player in self.play_order:\n print('Jogador: {}'.format(player))\n if self.player_data[player][3]:\n hunch = self.hunch(player, hunches)\n self.player_data[player][2] = hunch\n else:\n hunch = randint(0, self.max())\n while hunch in hunches:\n hunch = randint(0, self.max())\n self.player_data[player][2] = hunch\n print('Palpite: {}\\n'.format(hunch))\n hunches.append(hunch)\n winner = self.round_won()\n print('Soma dos palitos: {}'.format(self.sum()))\n if winner:\n print('{} ganhou a rodada\\n'.format(winner))\n self.player_data[winner][0] -= 1\n self.play_order.remove(winner)\n self.play_order.insert(0, winner)\n else:\n print('Ninguém ganhou :(\\n')\n print('-' * 10 + ' nova rodada ' + '-' * 10)\n self.reset()\n for player, data in self.player_data.items():\n if data[0] == 0:\n print('{} ganhou o jogo'.format(player))\n return player\n\n def hunch(self, player, hunches):\n hunch = self.player_data[player][1]\n rand = 0\n sticks = []\n stik = 0\n for other_player in self.play_order[0:self.play_order.index(player)]:\n average = self.average(self.play_order[self.play_order.index(\n other_player):len(self.play_order) - 1])\n stik = self.player_data[other_player][2] - average[0]\n for stick in sticks:\n stik -= stick\n sticks.append(stik)\n rand += average[1]\n hunch += stik\n average = 
self.average(self.play_order[self.play_order.index(player\n ):len(self.play_order) - 1])\n rand += average[1]\n hunch += average[0] + rand // 2\n if self.average(self.play_order)[0] > hunch:\n i = 0\n while hunch in hunches or hunch > self.max() or hunch < 0:\n i += 1\n if i % 2 == 0:\n hunch -= i\n else:\n hunch += i\n else:\n i = 0\n while hunch in hunches or hunch > self.max() or hunch < 0:\n i += 1\n if i % 2 == 0:\n hunch += i\n else:\n hunch -= i\n return hunch\n\n def average(self, remaining_players):\n result = 0\n for player in remaining_players:\n result += self.player_data[player][0]\n return [result // 2, result % 2]\n\n def max(self):\n total = 0\n for player in self.play_order:\n total += self.player_data[player][0]\n return total\n\n def reset(self):\n for player, data in self.player_data.items():\n data[1] = None\n data[2] = None\n\n def round_won(self):\n sum = self.sum()\n for player, data in self.player_data.items():\n if data[2] == sum:\n return player\n return None\n\n def won(self):\n for player, data in self.player_data.items():\n if data[0] == 0:\n return True\n return False\n\n def sum(self):\n sum = 0\n for player, data in self.player_data.items():\n sum += data[1]\n return sum\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Game(object):\n\n def __init__(self, players):\n if len(players) < 2:\n raise ValueError('Number of player must be at least 2')\n self.play_order = players\n self.player_data = {}\n for player in self.play_order:\n self.player_data[player] = [3, None, None, False]\n self.player_data['IA 1'][3] = True\n self.player_data['IA 2'][3] = True\n\n def game_loop(self):\n while not self.won():\n hunches = []\n for player, data in self.player_data.items():\n print('Jogador: {}'.format(player))\n if data[3]:\n data[1] = randint(0, data[0])\n else:\n data[1] = randint(0, data[0])\n print('Palitos na mão: {}\\n'.format(data[1]))\n for player in self.play_order:\n print('Jogador: {}'.format(player))\n if self.player_data[player][3]:\n hunch = self.hunch(player, hunches)\n self.player_data[player][2] = hunch\n else:\n hunch = randint(0, self.max())\n while hunch in hunches:\n hunch = randint(0, self.max())\n self.player_data[player][2] = hunch\n print('Palpite: {}\\n'.format(hunch))\n hunches.append(hunch)\n winner = self.round_won()\n print('Soma dos palitos: {}'.format(self.sum()))\n if winner:\n print('{} ganhou a rodada\\n'.format(winner))\n self.player_data[winner][0] -= 1\n self.play_order.remove(winner)\n self.play_order.insert(0, winner)\n else:\n print('Ninguém ganhou :(\\n')\n print('-' * 10 + ' nova rodada ' + '-' * 10)\n self.reset()\n for player, data in self.player_data.items():\n if data[0] == 0:\n print('{} ganhou o jogo'.format(player))\n return player\n\n def hunch(self, player, hunches):\n hunch = self.player_data[player][1]\n rand = 0\n sticks = []\n stik = 0\n for other_player in self.play_order[0:self.play_order.index(player)]:\n average = self.average(self.play_order[self.play_order.index(\n other_player):len(self.play_order) - 1])\n stik = self.player_data[other_player][2] - average[0]\n for stick in sticks:\n stik -= stick\n sticks.append(stik)\n rand += average[1]\n hunch += stik\n average = 
self.average(self.play_order[self.play_order.index(player\n ):len(self.play_order) - 1])\n rand += average[1]\n hunch += average[0] + rand // 2\n if self.average(self.play_order)[0] > hunch:\n i = 0\n while hunch in hunches or hunch > self.max() or hunch < 0:\n i += 1\n if i % 2 == 0:\n hunch -= i\n else:\n hunch += i\n else:\n i = 0\n while hunch in hunches or hunch > self.max() or hunch < 0:\n i += 1\n if i % 2 == 0:\n hunch += i\n else:\n hunch -= i\n return hunch\n\n def average(self, remaining_players):\n result = 0\n for player in remaining_players:\n result += self.player_data[player][0]\n return [result // 2, result % 2]\n\n def max(self):\n total = 0\n for player in self.play_order:\n total += self.player_data[player][0]\n return total\n\n def reset(self):\n for player, data in self.player_data.items():\n data[1] = None\n data[2] = None\n\n def round_won(self):\n sum = self.sum()\n for player, data in self.player_data.items():\n if data[2] == sum:\n return player\n return None\n\n def won(self):\n for player, data in self.player_data.items():\n if data[0] == 0:\n return True\n return False\n\n def sum(self):\n sum = 0\n for player, data in self.player_data.items():\n sum += data[1]\n return sum\n\n\nif __name__ == '__main__':\n players = ['Rand A', 'Rand B', 'Rand C', 'IA 1', 'IA 2']\n wins = {}\n n = 1\n for player in players:\n wins[player] = 0\n for i in range(0, n):\n game = Game(players)\n winner = game.game_loop()\n if winner:\n wins[winner] += 1\n print('\\nRelatório:')\n for player, win_count in wins.items():\n print('{} ganhou {} vezes'.format(player, win_count))\n",
"step-4": "from random import randint\n\n\nclass Game(object):\n\n def __init__(self, players):\n if len(players) < 2:\n raise ValueError('Number of player must be at least 2')\n self.play_order = players\n self.player_data = {}\n for player in self.play_order:\n self.player_data[player] = [3, None, None, False]\n self.player_data['IA 1'][3] = True\n self.player_data['IA 2'][3] = True\n\n def game_loop(self):\n while not self.won():\n hunches = []\n for player, data in self.player_data.items():\n print('Jogador: {}'.format(player))\n if data[3]:\n data[1] = randint(0, data[0])\n else:\n data[1] = randint(0, data[0])\n print('Palitos na mão: {}\\n'.format(data[1]))\n for player in self.play_order:\n print('Jogador: {}'.format(player))\n if self.player_data[player][3]:\n hunch = self.hunch(player, hunches)\n self.player_data[player][2] = hunch\n else:\n hunch = randint(0, self.max())\n while hunch in hunches:\n hunch = randint(0, self.max())\n self.player_data[player][2] = hunch\n print('Palpite: {}\\n'.format(hunch))\n hunches.append(hunch)\n winner = self.round_won()\n print('Soma dos palitos: {}'.format(self.sum()))\n if winner:\n print('{} ganhou a rodada\\n'.format(winner))\n self.player_data[winner][0] -= 1\n self.play_order.remove(winner)\n self.play_order.insert(0, winner)\n else:\n print('Ninguém ganhou :(\\n')\n print('-' * 10 + ' nova rodada ' + '-' * 10)\n self.reset()\n for player, data in self.player_data.items():\n if data[0] == 0:\n print('{} ganhou o jogo'.format(player))\n return player\n\n def hunch(self, player, hunches):\n hunch = self.player_data[player][1]\n rand = 0\n sticks = []\n stik = 0\n for other_player in self.play_order[0:self.play_order.index(player)]:\n average = self.average(self.play_order[self.play_order.index(\n other_player):len(self.play_order) - 1])\n stik = self.player_data[other_player][2] - average[0]\n for stick in sticks:\n stik -= stick\n sticks.append(stik)\n rand += average[1]\n hunch += stik\n average = 
self.average(self.play_order[self.play_order.index(player\n ):len(self.play_order) - 1])\n rand += average[1]\n hunch += average[0] + rand // 2\n if self.average(self.play_order)[0] > hunch:\n i = 0\n while hunch in hunches or hunch > self.max() or hunch < 0:\n i += 1\n if i % 2 == 0:\n hunch -= i\n else:\n hunch += i\n else:\n i = 0\n while hunch in hunches or hunch > self.max() or hunch < 0:\n i += 1\n if i % 2 == 0:\n hunch += i\n else:\n hunch -= i\n return hunch\n\n def average(self, remaining_players):\n result = 0\n for player in remaining_players:\n result += self.player_data[player][0]\n return [result // 2, result % 2]\n\n def max(self):\n total = 0\n for player in self.play_order:\n total += self.player_data[player][0]\n return total\n\n def reset(self):\n for player, data in self.player_data.items():\n data[1] = None\n data[2] = None\n\n def round_won(self):\n sum = self.sum()\n for player, data in self.player_data.items():\n if data[2] == sum:\n return player\n return None\n\n def won(self):\n for player, data in self.player_data.items():\n if data[0] == 0:\n return True\n return False\n\n def sum(self):\n sum = 0\n for player, data in self.player_data.items():\n sum += data[1]\n return sum\n\n\nif __name__ == '__main__':\n players = ['Rand A', 'Rand B', 'Rand C', 'IA 1', 'IA 2']\n wins = {}\n n = 1\n for player in players:\n wins[player] = 0\n for i in range(0, n):\n game = Game(players)\n winner = game.game_loop()\n if winner:\n wins[winner] += 1\n print('\\nRelatório:')\n for player, win_count in wins.items():\n print('{} ganhou {} vezes'.format(player, win_count))\n",
"step-5": "from random import randint\n\n\nclass Game(object):\n def __init__(self, players):\n if len(players) < 2:\n raise ValueError('Number of player must be at least 2')\n\n self.play_order = players\n\n self.player_data = {}\n for player in self.play_order:\n # [total, on_hand, hunch]\n self.player_data[player] = [3, None, None, False]\n self.player_data['IA 1'][3] = True\n self.player_data['IA 2'][3] = True\n\n def game_loop(self):\n while not self.won():\n hunches = []\n\n for player, data in self.player_data.items():\n print(\"Jogador: {}\".format(player))\n if (data[3]):\n data[1] = randint(0, data[0])\n\n else:\n data[1] = randint(0, data[0])\n\n print(\"Palitos na mão: {}\\n\".format(data[1]))\n\n for player in self.play_order:\n print(\"Jogador: {}\".format(player))\n if (self.player_data[player][3]):\n hunch = self.hunch(player, hunches)\n self.player_data[player][2] = hunch\n\n else:\n # random hunch\n hunch = randint(0, self.max())\n while hunch in hunches:\n hunch = randint(0, self.max())\n self.player_data[player][2] = hunch\n\n # human hunch\n # hunch = int(input(\"Qual seu palpite?\\n\"))\n # while (hunch in hunches):\n # hunch = int(input(\"Palpite invalido. 
\\nQual seu palpite?\\n\"))\n # self.player_data[player][2] = hunch\n\n print(\"Palpite: {}\\n\".format(hunch))\n\n hunches.append(hunch)\n\n winner = self.round_won()\n\n print(\"Soma dos palitos: {}\".format(self.sum()))\n\n if winner:\n print(\"{} ganhou a rodada\\n\".format(winner))\n self.player_data[winner][0] -= 1\n self.play_order.remove(winner)\n self.play_order.insert(0, winner)\n else:\n print(\"Ninguém ganhou :(\\n\")\n\n print((\"-\" * 10) + \" nova rodada \" + (\"-\" * 10))\n\n self.reset()\n\n for player, data in self.player_data.items():\n if data[0] == 0:\n print(\"{} ganhou o jogo\".format(player))\n return player\n\n def hunch(self, player, hunches):\n # seu palpite inicial eh pelo menos a sua quantidade de palitos\n hunch = self.player_data[player][1]\n rand = 0\n sticks = []\n stik = 0\n # calcula os palitos dos jogadores anteriores atraves dos palpites destes\n for other_player in self.play_order[0:self.play_order.index(player)]:\n # media dos proximos jogadores\n average = self.average(self.play_order[self.play_order.index(other_player):len(self.play_order) - 1])\n\n # calcula os palitos estimados do jogador\n stik = self.player_data[other_player][2] - average[0]\n\n # remove os palitos anteriores que ja estao considerados\n for stick in sticks:\n stik -= stick\n sticks.append(stik)\n\n # erros de arredondamento, adiciona a randomicidade esperada\n rand += average[1]\n hunch += stik\n\n # chama average com os jogadores remanescente\n average = self.average(self.play_order[self.play_order.index(player):len(self.play_order) - 1])\n\n # caso o numero seja quebrado (0.5) adiciona-se 1 a randomicidade\n rand += average[1]\n\n # valor estimado, com metade da randomicidade\n hunch += average[0] + rand // 2\n\n # caso o chute ja tenha sido usado, chutar o mais proximo possivel\n # começando pelo lado mais proximo da media\n if (self.average(self.play_order)[0] > hunch):\n i = 0\n while (hunch in hunches) or (hunch > self.max()) or (hunch < 0):\n i += 
1\n if i % 2 == 0:\n hunch -= i\n else:\n hunch += i\n\n else:\n i = 0\n while (hunch in hunches) or (hunch > self.max()) or (hunch < 0):\n i += 1\n if i % 2 == 0:\n hunch += i\n else:\n hunch -= i\n # retorna seu chute\n return hunch\n\n def average(self, remaining_players):\n result = 0\n for player in remaining_players:\n result += self.player_data[player][0]\n\n # entrega a media do resultado, e se houve sobra entrega 1 no segundo argumento\n return [result // 2, result % 2]\n\n def max(self):\n total = 0\n for player in self.play_order:\n total += self.player_data[player][0]\n return total\n\n def reset(self):\n for player, data in self.player_data.items():\n data[1] = None\n data[2] = None\n\n def round_won(self):\n sum = self.sum()\n\n for player, data in self.player_data.items():\n if data[2] == sum:\n return player\n return None\n\n def won(self):\n for player, data in self.player_data.items():\n if data[0] == 0:\n return True\n return False\n\n def sum(self):\n sum = 0\n\n for player, data in self.player_data.items():\n sum += data[1]\n\n return sum\n\n\nif __name__ == '__main__':\n players = ['Rand A', 'Rand B', 'Rand C', 'IA 1', 'IA 2']\n wins = {}\n\n n = 1\n\n for player in players:\n wins[player] = 0\n\n for i in range(0, n):\n game = Game(players)\n winner = game.game_loop()\n if winner:\n wins[winner] += 1\n\n print(\"\\nRelatório:\")\n for player, win_count in wins.items():\n print(\"{} ganhou {} vezes\".format(player, win_count))\n",
"step-ids": [
7,
10,
11,
12,
13
]
}
|
[
7,
10,
11,
12,
13
] |
from time import time
import threading
import os

# Per-movie accumulated watch time as "hh:mm:ss" strings; restored from
# settings.ini by Ui_Form1.__init__ at startup.
movie1Time = "00:00:00"
movie2Time = "00:00:00"
movie3Time = "00:00:00"
movie4Time = "00:00:00"
movie5Time = "00:00:00"

# Per-timer state, one triple per movie (N = 1..5):
#   timerNStart   -- epoch seconds (time()) when timer N was last started
#   timerNTime    -- timer N's current total as "hh:mm:ss"
#   timerNRunning -- True while timer N is counting
timer1Start = None
timer1Time = "00:00:00"
timer1Running = False

timer2Start = None
timer2Time = "00:00:00"
timer2Running = False

timer3Start = None
timer3Time = "00:00:00"
timer3Running = False

timer4Start = None
timer4Time = "00:00:00"
timer4Running = False

timer5Start = None
timer5Time = "00:00:00"
timer5Running = False

# NOTE(review): the header below was carried over from PyQt4's generated
# main.ui code; a coding cookie only takes effect on a file's first two
# lines, so it is inert here.
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'main.ui'
#
# Created: Wed May 21 20:35:02 2014
#      by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!

from PyQt4 import QtCore, QtGui
import sys

# Compatibility shim: use QString.fromUtf8 when QtCore provides QString;
# otherwise strings pass through unchanged.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

# Compatibility shim: translate with an explicit UTF-8 encoding argument
# when QApplication.UnicodeUTF8 exists, without it otherwise.
try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class TimerBackground(QtCore.QThread):
    """Worker thread that recomputes one movie timer and reports it.

    Reads the timer's start time and previously accumulated "hh:mm:ss"
    total, then emits the fresh total together with the text browser
    widget that should display it.
    """

    # Emitted as (formatted "hh:mm:ss" string, target text browser).
    index_finished = QtCore.pyqtSignal([str, QtCore.QObject])

    def __init__(self, timerStart, timerRunning, timerNumber, movieTime, textBrowser, parent=None):
        """Capture the state of timer `timerNumber` (1-5) for this run."""
        QtCore.QThread.__init__(self, parent)
        self.timerStart = timerStart        # epoch seconds when the timer started
        self.timerRunning = timerRunning    # whether the timer is active
        self.timerNumber = timerNumber      # which of the five timers this is
        self.textBrowser = textBrowser      # widget that displays this timer
        self.movieTime = movieTime          # accumulated "hh:mm:ss" before this run

    def run(self):
        # QThread entry point.  The method name's typo ("incrememnt") is
        # kept so existing callers are not broken.
        self.incrememnt(self.timerStart, self.timerRunning, self.timerNumber, self.movieTime)

    def formatTime(self, time):
        """Format a duration in seconds as zero-padded "hh:mm:ss".

        Rewritten with divmod: the original string assembly dropped the
        ':' separator whenever hours or minutes reached two digits, and
        relied on '/' behaving as integer division (it does not on
        Python 3).
        """
        hours, remainder = divmod(int(time), 3600)
        minutes, seconds = divmod(remainder, 60)
        return '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)

    def deformatTime(self, time):
        """Parse an "hh:mm:ss" string back into whole seconds."""
        timeInSecs = 0
        timeInSecs += int(time[0:2]) * 3600  # hours
        timeInSecs += int(time[3:5]) * 60    # minutes
        timeInSecs += int(time[6:8])         # seconds
        return timeInSecs

    def incrememnt(self, timerStart, timerRunning, timerNumber, movieTime):
        """Recompute the timer's total and publish it via index_finished.

        The new total is (now - timerStart) plus the previously
        accumulated movieTime; the matching timerNTime global is updated
        so the rest of the app can read it.
        """
        global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time

        if timerRunning:
            convertedTime = self.deformatTime(movieTime)
            timerTime = self.formatTime(int(time()) - int(timerStart) + convertedTime)
            if timerNumber == 1:
                timer1Time = timerTime
                self.index_finished.emit(timer1Time, self.textBrowser)
            elif timerNumber == 2:
                timer2Time = timerTime
                self.index_finished.emit(timer2Time, self.textBrowser)
            elif timerNumber == 3:
                timer3Time = timerTime
                self.index_finished.emit(timer3Time, self.textBrowser)
            elif timerNumber == 4:
                timer4Time = timerTime
                self.index_finished.emit(timer4Time, self.textBrowser)
            elif timerNumber == 5:
                timer5Time = timerTime
                self.index_finished.emit(timer5Time, self.textBrowser)
        else:
            timerStart = None
            # The original emitted a single argument here, which does not
            # match the two-argument signal signature and raised at
            # runtime; pass the browser too so receivers get a
            # consistent payload.
            self.index_finished.emit('none', self.textBrowser)
        return timerStart
class Ui_Form1(QtGui.QWidget):
    def __init__(self):
        """Build the form and restore the five saved movie times.

        If a non-empty settings.ini exists in the current working
        directory, its first five lines are read as the stored
        "hh:mm:ss" totals for movies 1-5 and pushed into the matching
        text browsers via updateGUITimers.
        """
        QtGui.QWidget.__init__(self)
        self.setupUi(self)
        # NOTE(review): uses a literal '\\' separator and os.getcwd(), so
        # this only works on Windows and only when launched from the
        # directory that holds settings.ini -- TODO confirm intended.
        if os.path.exists(os.getcwd() + '\\settings.ini') and os.path.getsize(os.getcwd() + '\\settings.ini') > 0:
            with open(os.getcwd() + '\\settings.ini', 'r') as var:
                global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time
                # One line per movie in fixed order (1..5); each value is
                # shown in its widget as it is read.
                movie1Time = var.readline().strip()
                self.updateGUITimers(movie1Time, self.textBrowser_6)
                movie2Time = var.readline().strip()
                self.updateGUITimers(movie2Time, self.textBrowser_2)
                movie3Time = var.readline().strip()
                self.updateGUITimers(movie3Time, self.textBrowser_5)
                movie4Time = var.readline().strip()
                self.updateGUITimers(movie4Time, self.textBrowser_3)
                movie5Time = var.readline().strip()
                self.updateGUITimers(movie5Time, self.textBrowser_4)
    def setupUi(self, Form):
        """Create and lay out every widget of the main window, then wire
        all signals to *Form*'s slots.

        Qt-Designer/pyuic-style generated construction code.  *Form* is
        the top-level widget being populated (normally ``self``; see
        ``__init__``).
        """
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(611, 289)
        # Fixed 611x289 window (min size == max size).
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())
        Form.setSizePolicy(sizePolicy)
        Form.setMinimumSize(QtCore.QSize(611, 289))
        Form.setMaximumSize(QtCore.QSize(611, 289))
        # Left column: the five "Movie N" row labels.
        self.verticalLayoutWidget = QtGui.QWidget(Form)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))
        self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
        self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setMargin(0)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)
        self.movieOne.setObjectName(_fromUtf8("movieOne"))
        self.verticalLayout.addWidget(self.movieOne)
        self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)
        self.movieTwo.setObjectName(_fromUtf8("movieTwo"))
        self.verticalLayout.addWidget(self.movieTwo)
        self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)
        self.movieThree.setObjectName(_fromUtf8("movieThree"))
        self.verticalLayout.addWidget(self.movieThree)
        self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)
        self.movieFour.setObjectName(_fromUtf8("movieFour"))
        self.verticalLayout.addWidget(self.movieFour)
        self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)
        self.movieFive.setObjectName(_fromUtf8("movieFive"))
        self.verticalLayout.addWidget(self.movieFive)
        # Right-hand credits panel (text, version label, logo, link).
        self.DesignedBy = QtGui.QLabel(Form)
        self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))
        self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)
        self.DesignedBy.setObjectName(_fromUtf8("DesignedBy"))
        self.sourceAt = QtGui.QLabel(Form)
        self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))
        self.sourceAt.setObjectName(_fromUtf8("sourceAt"))
        self.label = QtGui.QLabel(Form)
        self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))
        self.label.setObjectName(_fromUtf8("label"))
        # Middle column: five Start/Stop buttons.  NOTE(review): they are
        # added top-to-bottom as Two, One, Three, Four, Five, so the object
        # names do not match the on-screen row order (see the pressed()
        # connections below) -- confirm this pairing is intended.
        self.verticalLayoutWidget_2 = QtGui.QWidget(Form)
        self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261))
        self.verticalLayoutWidget_2.setObjectName(_fromUtf8("verticalLayoutWidget_2"))
        self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
        self.verticalLayout_2.setMargin(0)
        self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
        self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)
        self.startTwo.setObjectName(_fromUtf8("startTwo"))
        self.verticalLayout_2.addWidget(self.startTwo)
        self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)
        self.startOne.setObjectName(_fromUtf8("startOne"))
        self.verticalLayout_2.addWidget(self.startOne)
        self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)
        self.startThree.setObjectName(_fromUtf8("startThree"))
        self.verticalLayout_2.addWidget(self.startThree)
        self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)
        self.startFour.setObjectName(_fromUtf8("startFour"))
        self.verticalLayout_2.addWidget(self.startFour)
        self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)
        self.startFive.setObjectName(_fromUtf8("startFive"))
        self.verticalLayout_2.addWidget(self.startFive)
        # Bottom-right: Save and Reset buttons.
        self.horizontalLayoutWidget = QtGui.QWidget(Form)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80))
        self.horizontalLayoutWidget.setObjectName(_fromUtf8("horizontalLayoutWidget"))
        self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayout.setMargin(0)
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.save = QtGui.QPushButton(self.horizontalLayoutWidget)
        self.save.setObjectName(_fromUtf8("save"))
        self.horizontalLayout.addWidget(self.save)
        self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)
        self.settings.setObjectName(_fromUtf8("settings"))
        self.horizontalLayout.addWidget(self.settings)
        # Editable "HH:MM:SS" boxes, one per movie row.  Top-to-bottom on
        # screen (by y geometry) the order is: _6, _2, _5, _3, _4.
        self.textBrowser_2 = QtGui.QTextBrowser(Form)
        self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(113)
        sizePolicy.setVerticalStretch(20)
        sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().hasHeightForWidth())
        self.textBrowser_2.setSizePolicy(sizePolicy)
        self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))
        self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.textBrowser_2.setReadOnly(False)
        self.textBrowser_2.setUndoRedoEnabled(True)
        self.textBrowser_2.setObjectName(_fromUtf8("textBrowser_2"))
        self.textBrowser_5 = QtGui.QTextBrowser(Form)
        self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(113)
        sizePolicy.setVerticalStretch(20)
        sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().hasHeightForWidth())
        self.textBrowser_5.setSizePolicy(sizePolicy)
        self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))
        self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.textBrowser_5.setReadOnly(False)
        self.textBrowser_5.setUndoRedoEnabled(True)
        self.textBrowser_5.setObjectName(_fromUtf8("textBrowser_5"))
        self.textBrowser_4 = QtGui.QTextBrowser(Form)
        self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(113)
        sizePolicy.setVerticalStretch(20)
        sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().hasHeightForWidth())
        self.textBrowser_4.setSizePolicy(sizePolicy)
        self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))
        self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.textBrowser_4.setReadOnly(False)
        self.textBrowser_4.setUndoRedoEnabled(True)
        self.textBrowser_4.setObjectName(_fromUtf8("textBrowser_4"))
        self.textBrowser_3 = QtGui.QTextBrowser(Form)
        self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(113)
        sizePolicy.setVerticalStretch(20)
        sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().hasHeightForWidth())
        self.textBrowser_3.setSizePolicy(sizePolicy)
        self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))
        self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.textBrowser_3.setReadOnly(False)
        self.textBrowser_3.setUndoRedoEnabled(True)
        self.textBrowser_3.setObjectName(_fromUtf8("textBrowser_3"))
        self.textBrowser_6 = QtGui.QTextBrowser(Form)
        self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(113)
        sizePolicy.setVerticalStretch(20)
        sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().hasHeightForWidth())
        self.textBrowser_6.setSizePolicy(sizePolicy)
        self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))
        self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.textBrowser_6.setReadOnly(False)
        self.textBrowser_6.setUndoRedoEnabled(True)
        self.textBrowser_6.setObjectName(_fromUtf8("textBrowser_6"))
        # Vertical separator between the timer rows and the credits panel.
        self.line = QtGui.QFrame(Form)
        self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))
        self.line.setFrameShape(QtGui.QFrame.VLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName(_fromUtf8("line"))
        self.label_2 = QtGui.QLabel(Form)
        self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))
        self.label_2.setOpenExternalLinks(True)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.label_3 = QtGui.QLabel(Form)
        self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))
        self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.label_3.setText(_fromUtf8(""))
        self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8("logo.jpg")))
        self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.retranslateUi(Form)
        # Old-style signal wiring: time-box edits -> changeMovieN (no-ops),
        # Start/Stop presses -> changeTimerNState, Save/Reset -> their slots.
        QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie1)
        QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie2)
        QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie3)
        QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie4)
        QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8("textChanged()")), Form.changeMovie5)
        QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer1State)
        QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer2State)
        QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer3State)
        QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer4State)
        QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.changeTimer5State)
        QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.saveChanges)
        QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8("pressed()")), Form.reset)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Apply every user-visible string: window title, row labels,
        button captions, the Save tool tip, the credits/link labels and
        the initial "00:00:00" rich-text time documents.

        Kept separate from setupUi in the usual Qt/pyuic pattern so the
        UI text can be re-applied (e.g. after a language change).
        """
        Form.setWindowTitle(_translate("Form", "Multiple Movie Timer", None))
        self.movieOne.setText(_translate("Form", "Movie 1", None))
        self.movieTwo.setText(_translate("Form", "Movie 2", None))
        self.movieThree.setText(_translate("Form", "Movie 3", None))
        self.movieFour.setText(_translate("Form", "Movie 4", None))
        self.movieFive.setText(_translate("Form", "Movie 5", None))
        self.DesignedBy.setText(_translate("Form", "This program was\n"
"designed by:", None))
        self.sourceAt.setText(_translate("Form", " Source is available at:", None))
        self.label.setText(_translate("Form", "V 1.2", None))
        self.startTwo.setText(_translate("Form", "Start / Stop", None))
        self.startOne.setText(_translate("Form", "Start / Stop", None))
        self.startThree.setText(_translate("Form", "Start / Stop", None))
        self.startFour.setText(_translate("Form", "Start / Stop", None))
        self.startFive.setText(_translate("Form", "Start / Stop", None))
        self.save.setToolTip(_translate("Form", "<html><head/><body><p>Save all the current times</p></body></html>", None))
        self.save.setText(_translate("Form", "Save", None))
        self.settings.setText(_translate("Form", "Reset timers", None))
        # Each time box starts as a right-aligned "00:00:00" rich-text
        # document (same markup updateGUITimers rewrites on every tick).
        self.textBrowser_2.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
        self.textBrowser_5.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
        self.textBrowser_4.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
        self.textBrowser_3.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
        self.textBrowser_6.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>", None))
        self.label_2.setText(_translate("Form", "<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>", None))
    def changeMovie1(self):
        # Slot for textBrowser_6's textChanged() signal (wired in setupUi).
        # Intentionally a no-op: manual edits to the movie-1 time box are
        # not parsed back into movie1Time here.
        pass
    def changeMovie2(self):
        # Slot for textBrowser_2's textChanged() signal (wired in setupUi).
        # Intentionally a no-op: manual edits to the movie-2 time box are
        # not parsed back into movie2Time here.
        pass
    def changeMovie3(self):
        # Slot for textBrowser_5's textChanged() signal (wired in setupUi).
        # Intentionally a no-op: manual edits to the movie-3 time box are
        # not parsed back into movie3Time here.
        pass
    def changeMovie4(self):
        # Slot for textBrowser_3's textChanged() signal (wired in setupUi).
        # Intentionally a no-op: manual edits to the movie-4 time box are
        # not parsed back into movie4Time here.
        pass
    def changeMovie5(self):
        # Slot for textBrowser_4's textChanged() signal (wired in setupUi).
        # Intentionally a no-op: manual edits to the movie-5 time box are
        # not parsed back into movie5Time here.
        pass
    def changeTimer1State(self):
        """Toggle timer 1 between running and stopped.

        Slot for the startTwo button (see setupUi).  Starting records the
        wall-clock start time and begins a once-per-second loop that runs
        a TimerBackground worker to refresh textBrowser_6; stopping
        freezes the accumulated total back into the module-level
        movie1Time so the next start resumes from it.
        """
        global movie1Time, timer1Running, timer1Start, timer1Time
        if not timer1Running:
            timer1Running = True
            timer1Start = time()
            self.thread1 = TimerBackground(timer1Start, timer1Running, 1, movie1Time, self.textBrowser_6)
            self.thread1.index_finished.connect(self.updateGUITimers)
            def loopThread():
                # Re-arm every second; stops rescheduling once the global
                # timer1Running flag is cleared by the next button press.
                if timer1Running:
                    self.thread1.start()
                    threading.Timer(1, loopThread).start()
            loopThread()
        elif timer1Running:
            timer1Running = False
            movie1Time = timer1Time
    def changeTimer2State(self):
        """Toggle timer 2 between running and stopped.

        Slot for the startOne button (see setupUi).  Starting records the
        wall-clock start time and begins a once-per-second loop that runs
        a TimerBackground worker to refresh textBrowser_2; stopping
        freezes the accumulated total back into the module-level
        movie2Time so the next start resumes from it.
        """
        global movie2Time, timer2Running, timer2Start, timer2Time
        if not timer2Running:
            timer2Running = True
            timer2Start = time()
            self.thread2 = TimerBackground(timer2Start, timer2Running, 2, movie2Time, self.textBrowser_2)
            self.thread2.index_finished.connect(self.updateGUITimers)
            def loopThread():
                # Re-arm every second; stops rescheduling once the global
                # timer2Running flag is cleared by the next button press.
                if timer2Running:
                    self.thread2.start()
                    threading.Timer(1, loopThread).start()
            loopThread()
        elif timer2Running:
            timer2Running = False
            movie2Time = timer2Time
    def changeTimer3State(self):
        """Toggle timer 3 between running and stopped.

        Slot for the startThree button (see setupUi).  Starting records
        the wall-clock start time and begins a once-per-second loop that
        runs a TimerBackground worker to refresh textBrowser_5; stopping
        freezes the accumulated total back into the module-level
        movie3Time so the next start resumes from it.
        """
        global movie3Time, timer3Running, timer3Start, timer3Time
        if not timer3Running:
            timer3Running = True
            timer3Start = time()
            self.thread3 = TimerBackground(timer3Start, timer3Running, 3, movie3Time, self.textBrowser_5)
            self.thread3.index_finished.connect(self.updateGUITimers)
            def loopThread():
                # Re-arm every second; stops rescheduling once the global
                # timer3Running flag is cleared by the next button press.
                if timer3Running:
                    self.thread3.start()
                    threading.Timer(1, loopThread).start()
            loopThread()
        elif timer3Running:
            timer3Running = False
            movie3Time = timer3Time
    def changeTimer4State(self):
        """Toggle timer 4 between running and stopped.

        Slot for the startFour button (see setupUi).  Starting records
        the wall-clock start time and begins a once-per-second loop that
        runs a TimerBackground worker to refresh textBrowser_3; stopping
        freezes the accumulated total back into the module-level
        movie4Time so the next start resumes from it.
        """
        global movie4Time, timer4Running, timer4Start, timer4Time
        if not timer4Running:
            timer4Running = True
            timer4Start = time()
            self.thread4 = TimerBackground(timer4Start, timer4Running, 4, movie4Time, self.textBrowser_3)
            self.thread4.index_finished.connect(self.updateGUITimers)
            def loopThread():
                # Re-arm every second; stops rescheduling once the global
                # timer4Running flag is cleared by the next button press.
                if timer4Running:
                    self.thread4.start()
                    threading.Timer(1, loopThread).start()
            loopThread()
        elif timer4Running:
            timer4Running = False
            movie4Time = timer4Time
    def changeTimer5State(self):
        """Toggle timer 5 between running and stopped.

        Slot for the startFive button (see setupUi).  Starting records
        the wall-clock start time and begins a once-per-second loop that
        runs a TimerBackground worker to refresh textBrowser_4; stopping
        freezes the accumulated total back into the module-level
        movie5Time so the next start resumes from it.
        """
        global movie5Time, timer5Running, timer5Start, timer5Time
        if not timer5Running:
            timer5Running = True
            timer5Start = time()
            self.thread5 = TimerBackground(timer5Start, timer5Running, 5, movie5Time, self.textBrowser_4)
            self.thread5.index_finished.connect(self.updateGUITimers)
            def loopThread():
                # Re-arm every second; stops rescheduling once the global
                # timer5Running flag is cleared by the next button press.
                if timer5Running:
                    # (stray space before .start() left as-is; harmless)
                    self.thread5 .start()
                    threading.Timer(1, loopThread).start()
            loopThread()
        elif timer5Running:
            timer5Running = False
            movie5Time = timer5Time
def reset(self):
global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time
global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time
self.updateGUITimers('00:00:00', self.textBrowser_2)
self.updateGUITimers('00:00:00', self.textBrowser_3)
self.updateGUITimers('00:00:00', self.textBrowser_4)
self.updateGUITimers('00:00:00', self.textBrowser_5)
self.updateGUITimers('00:00:00', self.textBrowser_6)
timerStartingValue = '00:00:00'
movie1Time = timerStartingValue
movie2Time = timerStartingValue
movie3Time = timerStartingValue
movie4Time = timerStartingValue
movie5Time = timerStartingValue
timer1Time = timerStartingValue
timer2Time = timerStartingValue
timer3Time = timerStartingValue
timer4Time = timerStartingValue
timer5time = timerStartingValue
def saveChanges(self):
cwd = os.getcwd()
with open(cwd + '\\settings.ini', 'w') as var:
toWrite = [movie1Time, movie2Time, movie3Time, movie4Time, movie5Time]
for i in toWrite:
var.write(i + '\n')
def updateGUITimers(self, time, textBrowser):
if time != 'none':
textBrowser.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">" + str(time) + "</span></p></body></html>", None))
if __name__ == "__main__":
    # Script entry point: build the Qt application, show the timer form,
    # and hand control to the event loop until the window is closed.
    app = QtGui.QApplication(sys.argv)
    window = Ui_Form1()
    window.show()
    sys.exit(app.exec_())
|
normal
|
{
"blob_id": "cef4568b4568bceeedca6d57c0ccacfaae67c061",
"index": 147,
"step-1": "<mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n 
self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n 
self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n 
sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = 
QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n 
QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n 
self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta 
name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n <mask token>\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = 
False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', 
self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n <mask token>\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TimerBackground(QtCore.QThread):\n <mask token>\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime,\n textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n <mask token>\n <mask token>\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600\n timeInSecs += int(time[3:5]) * 60\n timeInSecs += int(time[6:8])\n return timeInSecs\n <mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 
261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = 
QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n 
self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n 
sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n 
QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n 
self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" 
/><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n 
pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1,\n movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n 
self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time,\n movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { 
white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TimerBackground(QtCore.QThread):\n <mask token>\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime,\n textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n <mask token>\n\n def formatTime(self, time):\n formattedTime = ''\n hours = time / 3600\n minutes = time / 60\n seconds = time % 60\n if hours == 0:\n formattedTime += '00:'\n elif len(str(hours)) == 1:\n formattedTime += '0' + str(hours) + ':'\n else:\n formattedTime += str(hours)\n if minutes == 0:\n formattedTime += '00:'\n elif minutes >= 60:\n newMinutes = minutes\n if minutes % 60 == 0:\n newMinutes = 0\n while newMinutes > 60:\n newMinutes -= 60\n if len(str(newMinutes)) == 1:\n formattedTime += '0' + str(newMinutes) + ':'\n else:\n formattedTime += str(newMinutes) + ':'\n elif len(str(minutes)) == 1:\n formattedTime += '0' + str(minutes) + ':'\n else:\n formattedTime += str(minutes)\n if len(str(seconds)) == 1:\n formattedTime += '0' + str(seconds)\n else:\n formattedTime += str(seconds)\n return formattedTime\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600\n timeInSecs += int(time[3:5]) * 60\n timeInSecs += int(time[6:8])\n return timeInSecs\n <mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n 
self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n 
self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n 
self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n 
sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n 
self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, 
QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML 
PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { 
white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1,\n movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n 
timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = 
timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time,\n movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TimerBackground(QtCore.QThread):\n <mask token>\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime,\n textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n\n def run(self):\n self.incrememnt(self.timerStart, self.timerRunning, self.\n timerNumber, self.movieTime)\n\n def formatTime(self, time):\n formattedTime = ''\n hours = time / 3600\n minutes = time / 60\n seconds = time % 60\n if hours == 0:\n formattedTime += '00:'\n elif len(str(hours)) == 1:\n formattedTime += '0' + str(hours) + ':'\n else:\n formattedTime += str(hours)\n if minutes == 0:\n formattedTime += '00:'\n elif minutes >= 60:\n newMinutes = minutes\n if minutes % 60 == 0:\n newMinutes = 0\n while newMinutes > 60:\n newMinutes -= 60\n if len(str(newMinutes)) == 1:\n formattedTime += '0' + str(newMinutes) + ':'\n else:\n formattedTime += str(newMinutes) + ':'\n elif len(str(minutes)) == 1:\n formattedTime += '0' + str(minutes) + ':'\n else:\n formattedTime += str(minutes)\n if len(str(seconds)) == 1:\n formattedTime += '0' + str(seconds)\n else:\n formattedTime += str(seconds)\n return formattedTime\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600\n timeInSecs += int(time[3:5]) * 60\n timeInSecs += int(time[6:8])\n return timeInSecs\n <mask token>\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(\n os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + '\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n 
self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8('Form'))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.\n QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\n 'verticalLayoutWidget'))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8('movieOne'))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8('movieTwo'))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8('movieThree'))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8('movieFour'))\n self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8('movieFive'))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = 
QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8('DesignedBy'))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8('sourceAt'))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8('label'))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261)\n )\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\n 'verticalLayoutWidget_2'))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8('startTwo'))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8('startOne'))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8('startThree'))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8('startFour'))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8('startFive'))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget = QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80)\n )\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\n 'horizontalLayoutWidget'))\n self.horizontalLayout = 
QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8('horizontalLayout'))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8('save'))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8('settings'))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8('textBrowser_2'))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8('textBrowser_5'))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n 
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8('textBrowser_4'))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.\n QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8('textBrowser_3'))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.\n QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().\n hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.\n ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8('textBrowser_6'))\n 
self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8('line'))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8('label_2'))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(''))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8('logo.jpg')))\n self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTop |\n QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8('label_3'))\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\n 'textChanged()')), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\n 
'pressed()')), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\n 'pressed()')), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate('Form', 'Multiple Movie Timer', None))\n self.movieOne.setText(_translate('Form', 'Movie 1', None))\n self.movieTwo.setText(_translate('Form', 'Movie 2', None))\n self.movieThree.setText(_translate('Form', 'Movie 3', None))\n self.movieFour.setText(_translate('Form', 'Movie 4', None))\n self.movieFive.setText(_translate('Form', 'Movie 5', None))\n self.DesignedBy.setText(_translate('Form',\n 'This program was\\ndesigned by:', None))\n self.sourceAt.setText(_translate('Form', ' Source is available at:',\n None))\n self.label.setText(_translate('Form', 'V 1.2', None))\n self.startTwo.setText(_translate('Form', 'Start / Stop', None))\n self.startOne.setText(_translate('Form', 'Start / Stop', None))\n self.startThree.setText(_translate('Form', 'Start / Stop', None))\n self.startFour.setText(_translate('Form', 'Start / Stop', None))\n self.startFive.setText(_translate('Form', 'Start / Stop', None))\n self.save.setToolTip(_translate('Form',\n '<html><head/><body><p>Save all the current times</p></body></html>'\n , None))\n self.save.setText(_translate('Form', 'Save', None))\n self.settings.setText(_translate('Form', 'Reset timers', None))\n self.textBrowser_2.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , 
None))\n self.textBrowser_5.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_4.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_3.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.textBrowser_6.setHtml(_translate('Form',\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" 
\"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">00:00:00</span></p></body></html>\"\"\"\n , None))\n self.label_2.setText(_translate('Form',\n '<html><head/><body><p><a href=\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\"><span style=\" text-decoration: underline; color:#0000ff;\">https://github.com/tmwbook</span></a></p></body></html>'\n , None))\n\n def changeMovie1(self):\n pass\n\n def changeMovie2(self):\n pass\n\n def changeMovie3(self):\n pass\n\n def changeMovie4(self):\n pass\n\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1,\n movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2,\n movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def 
changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3,\n movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4,\n movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5,\n movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = 
'00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time,\n movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate('Form', \n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\np, li { white-space: pre-wrap; }\n</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">\"\"\"\n + str(time) + '</span></p></body></html>', None))\n\n\n<mask token>\n",
"step-5": "from time import time\nimport threading\nimport os\n#hh:mm:ss\nmovie1Time = \"00:00:00\"\nmovie2Time = \"00:00:00\"\nmovie3Time = \"00:00:00\"\nmovie4Time = \"00:00:00\"\nmovie5Time = \"00:00:00\"\n\ntimer1Start = None\ntimer1Time = \"00:00:00\"\ntimer1Running = False\ntimer2Start = None\ntimer2Time = \"00:00:00\"\ntimer2Running = False\ntimer3Start = None\ntimer3Time = \"00:00:00\"\ntimer3Running = False\ntimer4Start = None\ntimer4Time = \"00:00:00\"\ntimer4Running = False\ntimer5Start = None\ntimer5Time = \"00:00:00\"\ntimer5Running = False\n\n# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'main.ui'\n#\n# Created: Wed May 21 20:35:02 2014\n# by: PyQt4 UI code generator 4.10.4\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt4 import QtCore, QtGui\nimport sys\ntry:\n _fromUtf8 = QtCore.QString.fromUtf8\nexcept AttributeError:\n def _fromUtf8(s):\n return s\n\ntry:\n _encoding = QtGui.QApplication.UnicodeUTF8\n def _translate(context, text, disambig):\n return QtGui.QApplication.translate(context, text, disambig, _encoding)\nexcept AttributeError:\n def _translate(context, text, disambig):\n return QtGui.QApplication.translate(context, text, disambig)\n\n\nclass TimerBackground(QtCore.QThread):\n index_finished = QtCore.pyqtSignal([str, QtCore.QObject])\n\n def __init__(self, timerStart, timerRunning, timerNumber, movieTime, textBrowser, parent=None):\n QtCore.QThread.__init__(self, parent)\n self.timerStart = timerStart\n self.timerRunning = timerRunning\n self.timerNumber = timerNumber\n self.textBrowser = textBrowser\n self.movieTime = movieTime\n\n def run(self):\n self.incrememnt(self.timerStart, self.timerRunning, self.timerNumber, self.movieTime)\n\n def formatTime(self, time):\n formattedTime = ''\n hours = time / 3600\n minutes = time / 60\n seconds = time % 60\n #handles hours\n if hours == 0:\n formattedTime += \"00:\"\n elif len(str(hours)) == 1:\n formattedTime += '0' + str(hours) + 
':'\n else:\n formattedTime += str(hours)\n #handles minutes\n if minutes == 0:\n formattedTime += \"00:\"\n elif minutes >= 60:\n newMinutes = minutes\n if minutes % 60 == 0:\n newMinutes = 0\n while newMinutes > 60:\n newMinutes -= 60\n if len(str(newMinutes)) == 1:\n formattedTime += '0' + str(newMinutes) + ':'\n else:\n formattedTime += str(newMinutes) + ':'\n else:\n if len(str(minutes)) == 1:\n formattedTime += '0' + str(minutes) + ':'\n else:\n formattedTime += str(minutes)\n #handles seconds\n if len(str(seconds)) == 1:\n formattedTime += '0' + str(seconds)\n else:\n formattedTime += str(seconds)\n return formattedTime\n\n def deformatTime(self, time):\n timeInSecs = 0\n timeInSecs += int(time[0:2]) * 3600 # hours\n timeInSecs += int(time[3:5]) * 60 # minutes\n timeInSecs += int(time[6:8]) # seconds\n return timeInSecs\n\n def incrememnt(self, timerStart, timerRunning, timerNumber, movieTime):\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n if timerRunning:\n convertedTime = self.deformatTime(movieTime)\n timerTime = self.formatTime(int(time()) - int(timerStart) + convertedTime)\n if timerNumber == 1:\n timer1Time = timerTime\n self.index_finished.emit(timer1Time, self.textBrowser)\n elif timerNumber == 2:\n timer2Time = timerTime\n self.index_finished.emit(timer2Time, self.textBrowser)\n elif timerNumber == 3:\n timer3Time = timerTime\n self.index_finished.emit(timer3Time, self.textBrowser)\n elif timerNumber == 4:\n timer4Time = timerTime\n self.index_finished.emit(timer4Time, self.textBrowser)\n elif timerNumber == 5:\n timer5Time = timerTime\n self.index_finished.emit(timer5Time, self.textBrowser)\n else:\n timerStart = None\n self.index_finished.emit('none')\n return timerStart\n\n\nclass Ui_Form1(QtGui.QWidget):\n\n def __init__(self):\n QtGui.QWidget.__init__(self)\n self.setupUi(self)\n if os.path.exists(os.getcwd() + '\\\\settings.ini') and os.path.getsize(os.getcwd() + '\\\\settings.ini') > 0:\n with open(os.getcwd() + 
'\\\\settings.ini', 'r') as var:\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n movie1Time = var.readline().strip()\n self.updateGUITimers(movie1Time, self.textBrowser_6)\n movie2Time = var.readline().strip()\n self.updateGUITimers(movie2Time, self.textBrowser_2)\n movie3Time = var.readline().strip()\n self.updateGUITimers(movie3Time, self.textBrowser_5)\n movie4Time = var.readline().strip()\n self.updateGUITimers(movie4Time, self.textBrowser_3)\n movie5Time = var.readline().strip()\n self.updateGUITimers(movie5Time, self.textBrowser_4)\n\n def setupUi(self, Form):\n Form.setObjectName(_fromUtf8(\"Form\"))\n Form.resize(611, 289)\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)\n sizePolicy.setHorizontalStretch(0)\n sizePolicy.setVerticalStretch(0)\n sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())\n Form.setSizePolicy(sizePolicy)\n Form.setMinimumSize(QtCore.QSize(611, 289))\n Form.setMaximumSize(QtCore.QSize(611, 289))\n self.verticalLayoutWidget = QtGui.QWidget(Form)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(30, 20, 61, 261))\n self.verticalLayoutWidget.setObjectName(_fromUtf8(\"verticalLayoutWidget\"))\n self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)\n self.verticalLayout.setMargin(0)\n self.verticalLayout.setObjectName(_fromUtf8(\"verticalLayout\"))\n self.movieOne = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieOne.setObjectName(_fromUtf8(\"movieOne\"))\n self.verticalLayout.addWidget(self.movieOne)\n self.movieTwo = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieTwo.setObjectName(_fromUtf8(\"movieTwo\"))\n self.verticalLayout.addWidget(self.movieTwo)\n self.movieThree = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieThree.setObjectName(_fromUtf8(\"movieThree\"))\n self.verticalLayout.addWidget(self.movieThree)\n self.movieFour = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFour.setObjectName(_fromUtf8(\"movieFour\"))\n 
self.verticalLayout.addWidget(self.movieFour)\n self.movieFive = QtGui.QLabel(self.verticalLayoutWidget)\n self.movieFive.setObjectName(_fromUtf8(\"movieFive\"))\n self.verticalLayout.addWidget(self.movieFive)\n self.DesignedBy = QtGui.QLabel(Form)\n self.DesignedBy.setGeometry(QtCore.QRect(440, 40, 111, 31))\n self.DesignedBy.setAlignment(QtCore.Qt.AlignCenter)\n self.DesignedBy.setObjectName(_fromUtf8(\"DesignedBy\"))\n self.sourceAt = QtGui.QLabel(Form)\n self.sourceAt.setGeometry(QtCore.QRect(440, 170, 111, 20))\n self.sourceAt.setObjectName(_fromUtf8(\"sourceAt\"))\n self.label = QtGui.QLabel(Form)\n self.label.setGeometry(QtCore.QRect(580, 270, 31, 16))\n self.label.setObjectName(_fromUtf8(\"label\"))\n self.verticalLayoutWidget_2 = QtGui.QWidget(Form)\n self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(210, 40, 101, 261))\n self.verticalLayoutWidget_2.setObjectName(_fromUtf8(\"verticalLayoutWidget_2\"))\n self.verticalLayout_2 = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)\n self.verticalLayout_2.setMargin(0)\n self.verticalLayout_2.setObjectName(_fromUtf8(\"verticalLayout_2\"))\n self.startTwo = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startTwo.setObjectName(_fromUtf8(\"startTwo\"))\n self.verticalLayout_2.addWidget(self.startTwo)\n self.startOne = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startOne.setObjectName(_fromUtf8(\"startOne\"))\n self.verticalLayout_2.addWidget(self.startOne)\n self.startThree = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startThree.setObjectName(_fromUtf8(\"startThree\"))\n self.verticalLayout_2.addWidget(self.startThree)\n self.startFour = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFour.setObjectName(_fromUtf8(\"startFour\"))\n self.verticalLayout_2.addWidget(self.startFour)\n self.startFive = QtGui.QPushButton(self.verticalLayoutWidget_2)\n self.startFive.setObjectName(_fromUtf8(\"startFive\"))\n self.verticalLayout_2.addWidget(self.startFive)\n self.horizontalLayoutWidget 
= QtGui.QWidget(Form)\n self.horizontalLayoutWidget.setGeometry(QtCore.QRect(400, 230, 160, 80))\n self.horizontalLayoutWidget.setObjectName(_fromUtf8(\"horizontalLayoutWidget\"))\n self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)\n self.horizontalLayout.setMargin(0)\n self.horizontalLayout.setObjectName(_fromUtf8(\"horizontalLayout\"))\n self.save = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.save.setObjectName(_fromUtf8(\"save\"))\n self.horizontalLayout.addWidget(self.save)\n self.settings = QtGui.QPushButton(self.horizontalLayoutWidget)\n self.settings.setObjectName(_fromUtf8(\"settings\"))\n self.horizontalLayout.addWidget(self.settings)\n self.textBrowser_2 = QtGui.QTextBrowser(Form)\n self.textBrowser_2.setGeometry(QtCore.QRect(90, 110, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_2.sizePolicy().hasHeightForWidth())\n self.textBrowser_2.setSizePolicy(sizePolicy)\n self.textBrowser_2.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_2.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_2.setReadOnly(False)\n self.textBrowser_2.setUndoRedoEnabled(True)\n self.textBrowser_2.setObjectName(_fromUtf8(\"textBrowser_2\"))\n self.textBrowser_5 = QtGui.QTextBrowser(Form)\n self.textBrowser_5.setGeometry(QtCore.QRect(90, 160, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_5.sizePolicy().hasHeightForWidth())\n self.textBrowser_5.setSizePolicy(sizePolicy)\n self.textBrowser_5.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_5.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_5.setReadOnly(False)\n 
self.textBrowser_5.setUndoRedoEnabled(True)\n self.textBrowser_5.setObjectName(_fromUtf8(\"textBrowser_5\"))\n self.textBrowser_4 = QtGui.QTextBrowser(Form)\n self.textBrowser_4.setGeometry(QtCore.QRect(90, 260, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_4.sizePolicy().hasHeightForWidth())\n self.textBrowser_4.setSizePolicy(sizePolicy)\n self.textBrowser_4.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_4.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_4.setReadOnly(False)\n self.textBrowser_4.setUndoRedoEnabled(True)\n self.textBrowser_4.setObjectName(_fromUtf8(\"textBrowser_4\"))\n self.textBrowser_3 = QtGui.QTextBrowser(Form)\n self.textBrowser_3.setGeometry(QtCore.QRect(90, 210, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_3.sizePolicy().hasHeightForWidth())\n self.textBrowser_3.setSizePolicy(sizePolicy)\n self.textBrowser_3.setMinimumSize(QtCore.QSize(113, 20))\n self.textBrowser_3.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_3.setReadOnly(False)\n self.textBrowser_3.setUndoRedoEnabled(True)\n self.textBrowser_3.setObjectName(_fromUtf8(\"textBrowser_3\"))\n self.textBrowser_6 = QtGui.QTextBrowser(Form)\n self.textBrowser_6.setGeometry(QtCore.QRect(90, 60, 113, 21))\n sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)\n sizePolicy.setHorizontalStretch(113)\n sizePolicy.setVerticalStretch(20)\n sizePolicy.setHeightForWidth(self.textBrowser_6.sizePolicy().hasHeightForWidth())\n self.textBrowser_6.setSizePolicy(sizePolicy)\n self.textBrowser_6.setMinimumSize(QtCore.QSize(113, 20))\n 
self.textBrowser_6.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)\n self.textBrowser_6.setReadOnly(False)\n self.textBrowser_6.setUndoRedoEnabled(True)\n self.textBrowser_6.setObjectName(_fromUtf8(\"textBrowser_6\"))\n self.line = QtGui.QFrame(Form)\n self.line.setGeometry(QtCore.QRect(340, 50, 20, 211))\n self.line.setFrameShape(QtGui.QFrame.VLine)\n self.line.setFrameShadow(QtGui.QFrame.Sunken)\n self.line.setObjectName(_fromUtf8(\"line\"))\n self.label_2 = QtGui.QLabel(Form)\n self.label_2.setGeometry(QtCore.QRect(430, 190, 151, 20))\n self.label_2.setOpenExternalLinks(True)\n self.label_2.setObjectName(_fromUtf8(\"label_2\"))\n self.label_3 = QtGui.QLabel(Form)\n self.label_3.setGeometry(QtCore.QRect(420, 80, 161, 91))\n self.label_3.setLayoutDirection(QtCore.Qt.LeftToRight)\n self.label_3.setText(_fromUtf8(\"\"))\n self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8(\"logo.jpg\")))\n self.label_3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTop|QtCore.Qt.AlignTrailing)\n self.label_3.setObjectName(_fromUtf8(\"label_3\"))\n\n self.retranslateUi(Form)\n QtCore.QObject.connect(self.textBrowser_6, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie1)\n QtCore.QObject.connect(self.textBrowser_2, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie2)\n QtCore.QObject.connect(self.textBrowser_5, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie3)\n QtCore.QObject.connect(self.textBrowser_3, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie4)\n QtCore.QObject.connect(self.textBrowser_4, QtCore.SIGNAL(_fromUtf8(\"textChanged()\")), Form.changeMovie5)\n QtCore.QObject.connect(self.startTwo, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer1State)\n QtCore.QObject.connect(self.startOne, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer2State)\n QtCore.QObject.connect(self.startThree, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer3State)\n QtCore.QObject.connect(self.startFour, 
QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer4State)\n QtCore.QObject.connect(self.startFive, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.changeTimer5State)\n QtCore.QObject.connect(self.save, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.saveChanges)\n QtCore.QObject.connect(self.settings, QtCore.SIGNAL(_fromUtf8(\"pressed()\")), Form.reset)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(_translate(\"Form\", \"Multiple Movie Timer\", None))\n self.movieOne.setText(_translate(\"Form\", \"Movie 1\", None))\n self.movieTwo.setText(_translate(\"Form\", \"Movie 2\", None))\n self.movieThree.setText(_translate(\"Form\", \"Movie 3\", None))\n self.movieFour.setText(_translate(\"Form\", \"Movie 4\", None))\n self.movieFive.setText(_translate(\"Form\", \"Movie 5\", None))\n self.DesignedBy.setText(_translate(\"Form\", \"This program was\\n\"\n\"designed by:\", None))\n self.sourceAt.setText(_translate(\"Form\", \" Source is available at:\", None))\n self.label.setText(_translate(\"Form\", \"V 1.2\", None))\n self.startTwo.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startOne.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startThree.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startFour.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.startFive.setText(_translate(\"Form\", \"Start / Stop\", None))\n self.save.setToolTip(_translate(\"Form\", \"<html><head/><body><p>Save all the current times</p></body></html>\", None))\n self.save.setText(_translate(\"Form\", \"Save\", None))\n self.settings.setText(_translate(\"Form\", \"Reset timers\", None))\n self.textBrowser_2.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; 
}\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_5.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_4.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_3.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; 
}\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.textBrowser_6.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n\"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n\"p, li { white-space: pre-wrap; }\\n\"\n\"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n\"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">00:00:00</span></p></body></html>\", None))\n self.label_2.setText(_translate(\"Form\", \"<html><head/><body><p><a href=\\\"https://github.com/tmwbook/small-projects/tree/Master/MultipleMovieTimer\\\"><span style=\\\" text-decoration: underline; color:#0000ff;\\\">https://github.com/tmwbook</span></a></p></body></html>\", None))\n\n\n def changeMovie1(self):\n pass\n def changeMovie2(self):\n pass\n def changeMovie3(self):\n pass\n def changeMovie4(self):\n pass\n def changeMovie5(self):\n pass\n\n def changeTimer1State(self):\n global movie1Time, timer1Running, timer1Start, timer1Time\n if not timer1Running:\n timer1Running = True\n timer1Start = time()\n self.thread1 = TimerBackground(timer1Start, timer1Running, 1, movie1Time, self.textBrowser_6)\n self.thread1.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer1Running:\n self.thread1.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer1Running:\n timer1Running = False\n movie1Time = 
timer1Time\n\n def changeTimer2State(self):\n global movie2Time, timer2Running, timer2Start, timer2Time\n if not timer2Running:\n timer2Running = True\n timer2Start = time()\n self.thread2 = TimerBackground(timer2Start, timer2Running, 2, movie2Time, self.textBrowser_2)\n self.thread2.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer2Running:\n self.thread2.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer2Running:\n timer2Running = False\n movie2Time = timer2Time\n\n def changeTimer3State(self):\n global movie3Time, timer3Running, timer3Start, timer3Time\n if not timer3Running:\n timer3Running = True\n timer3Start = time()\n self.thread3 = TimerBackground(timer3Start, timer3Running, 3, movie3Time, self.textBrowser_5)\n self.thread3.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer3Running:\n self.thread3.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer3Running:\n timer3Running = False\n movie3Time = timer3Time\n\n def changeTimer4State(self):\n global movie4Time, timer4Running, timer4Start, timer4Time\n if not timer4Running:\n timer4Running = True\n timer4Start = time()\n self.thread4 = TimerBackground(timer4Start, timer4Running, 4, movie4Time, self.textBrowser_3)\n self.thread4.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer4Running:\n self.thread4.start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif timer4Running:\n timer4Running = False\n movie4Time = timer4Time\n\n def changeTimer5State(self):\n global movie5Time, timer5Running, timer5Start, timer5Time\n if not timer5Running:\n timer5Running = True\n timer5Start = time()\n self.thread5 = TimerBackground(timer5Start, timer5Running, 5, movie5Time, self.textBrowser_4)\n self.thread5.index_finished.connect(self.updateGUITimers)\n\n def loopThread():\n if timer5Running:\n self.thread5 .start()\n threading.Timer(1, loopThread).start()\n loopThread()\n elif 
timer5Running:\n timer5Running = False\n movie5Time = timer5Time\n\n def reset(self):\n global movie1Time, movie2Time, movie3Time, movie4Time, movie5Time\n global timer1Time, timer2Time, timer3Time, timer4Time, timer5Time\n self.updateGUITimers('00:00:00', self.textBrowser_2)\n self.updateGUITimers('00:00:00', self.textBrowser_3)\n self.updateGUITimers('00:00:00', self.textBrowser_4)\n self.updateGUITimers('00:00:00', self.textBrowser_5)\n self.updateGUITimers('00:00:00', self.textBrowser_6)\n timerStartingValue = '00:00:00'\n movie1Time = timerStartingValue\n movie2Time = timerStartingValue\n movie3Time = timerStartingValue\n movie4Time = timerStartingValue\n movie5Time = timerStartingValue\n timer1Time = timerStartingValue\n timer2Time = timerStartingValue\n timer3Time = timerStartingValue\n timer4Time = timerStartingValue\n timer5time = timerStartingValue\n\n def saveChanges(self):\n cwd = os.getcwd()\n with open(cwd + '\\\\settings.ini', 'w') as var:\n toWrite = [movie1Time, movie2Time, movie3Time, movie4Time, movie5Time]\n for i in toWrite:\n var.write(i + '\\n')\n\n def updateGUITimers(self, time, textBrowser):\n if time != 'none':\n textBrowser.setHtml(_translate(\"Form\", \"<!DOCTYPE HTML PUBLIC \\\"-//W3C//DTD HTML 4.0//EN\\\" \\\"http://www.w3.org/TR/REC-html40/strict.dtd\\\">\\n\"\n \"<html><head><meta name=\\\"qrichtext\\\" content=\\\"1\\\" /><style type=\\\"text/css\\\">\\n\"\n \"p, li { white-space: pre-wrap; }\\n\"\n \"</style></head><body style=\\\" font-family:\\'MS Shell Dlg 2\\'; font-size:8.25pt; font-weight:400; font-style:normal;\\\">\\n\"\n \"<p align=\\\"right\\\" style=\\\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\\\"><span style=\\\" font-size:8pt;\\\">\" + str(time) + \"</span></p></body></html>\", None))\n\n\nif __name__ == \"__main__\":\n app = QtGui.QApplication(sys.argv)\n ex = Ui_Form1()\n ex.show()\n sys.exit(app.exec_())",
"step-ids": [
15,
20,
21,
22,
28
]
}
|
[
15,
20,
21,
22,
28
] |
from collections import deque
def solution(people, limit):
    """Return the minimum number of two-person boats needed to carry everyone.

    Each boat holds at most two people and at most ``limit`` total weight.
    Greedy two-pointer proof sketch: the heaviest remaining person must take
    some boat, and pairing them with the lightest remaining person is never
    worse than any other pairing.

    Args:
        people: list of individual weights (sorted in place as a side effect,
            matching the original behavior).
        limit: weight capacity of one boat.

    Returns:
        Minimum number of boats.
    """
    people.sort()
    boats = 0
    light, heavy = 0, len(people) - 1
    while light <= heavy:
        # The heaviest person always boards the next boat; bring the lightest
        # along only when the pair fits within the limit.
        if people[light] + people[heavy] <= limit:
            light += 1
        heavy -= 1
        boats += 1
    return boats
|
normal
|
{
"blob_id": "b0dbc4e8a2ce41dc9d2040890e3df4d078680fa1",
"index": 5444,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef solution(people, limit):\n people.sort()\n cnt = 0\n left_idx = 0\n right_idx = len(people) - 1\n while left_idx <= right_idx:\n if people[left_idx] + people[right_idx] <= limit:\n cnt += 1\n left_idx += 1\n right_idx -= 1\n else:\n cnt += 1\n right_idx -= 1\n answer = cnt\n return answer\n",
"step-3": "from collections import deque\n\n\ndef solution(people, limit):\n people.sort()\n cnt = 0\n left_idx = 0\n right_idx = len(people) - 1\n while left_idx <= right_idx:\n if people[left_idx] + people[right_idx] <= limit:\n cnt += 1\n left_idx += 1\n right_idx -= 1\n else:\n cnt += 1\n right_idx -= 1\n answer = cnt\n return answer\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import requests
from urllib.parse import urlparse
from bs4 import BeautifulSoup
import re
import datetime
import random
pages = set()  # NOTE(review): not referenced anywhere in this script — likely leftover state; confirm before removing
def getInternalLinks(bs, includeUrl):
    """Return all internal links found on a parsed page.

    A link is internal when its href starts with "/" (it is made absolute
    against the site root) or already contains the site's scheme://netloc.

    Args:
        bs: parsed page (BeautifulSoup-like object exposing ``find_all``).
        includeUrl: any URL on the target site; only scheme and netloc are used.

    Returns:
        List of absolute internal URLs, deduplicated, in document order.
    """
    includeUrl = f'{urlparse(includeUrl).scheme}://{urlparse(includeUrl).netloc}'
    internalLinks = []
    # re.escape keeps "." in the domain from matching arbitrary characters.
    pattern = re.compile('^(/|.*' + re.escape(includeUrl) + ')')
    for link in bs.find_all('a', href=pattern):
        href = link.attrs['href']
        if href is None:
            continue
        absolute = includeUrl + href if href.startswith('/') else href
        # Deduplicate on the absolute form so "/x" and "<site>/x" collapse
        # to a single entry (the original compared raw vs. prefixed URLs).
        if absolute not in internalLinks:
            internalLinks.append(absolute)
    return internalLinks
def getExternalLinks(bs, excludeUrl):
    """Return all external links found on a parsed page.

    A link is external when it starts with "http" or "www" and does not
    contain ``excludeUrl`` (typically the current site's netloc) anywhere.

    Args:
        bs: parsed page (BeautifulSoup-like object exposing ``find_all``).
        excludeUrl: substring identifying the current site, e.g. its domain.

    Returns:
        List of external URLs, deduplicated, in document order.
    """
    externalLinks = []
    # re.escape keeps "." in the domain from matching arbitrary characters.
    pattern = re.compile('^(http|www)((?!' + re.escape(excludeUrl) + ').)*$')
    for link in bs.find_all('a', href=pattern):
        if link.attrs['href'] is not None:
            if link.attrs['href'] not in externalLinks:
                externalLinks.append(link.attrs['href'])
    return externalLinks
def getRandomExternalLink(startingPage):
    """Fetch ``startingPage`` and return a random external link found on it.

    When the page has no external links, hops to a random internal link and
    retries there (recursively). Returns None on a dead-end page with no
    links at all, instead of crashing.
    """
    html = requests.get(startingPage)
    bs = BeautifulSoup(html.text, 'html.parser')
    externalLinks = getExternalLinks(bs,
        urlparse(startingPage).netloc)
    if len(externalLinks) == 0:
        print('No external links, looking around the site for one.')
        domain = f'{urlparse(startingPage).scheme}://{urlparse(startingPage).netloc}'
        internalLinks = getInternalLinks(bs, domain)
        if not internalLinks:
            # Dead end: random.randint(0, -1) would raise ValueError here.
            return None
        return getRandomExternalLink(internalLinks[random.randint(0, len(internalLinks) - 1)])
    else:
        return externalLinks[random.randint(0, len(externalLinks) - 1)]
# Collects a list of all external URLs found on the site.
# Module-level accumulators shared by getAllExternalLinks during the
# recursive crawl: allExtLinks holds every external URL printed so far,
# allIntLinks every internal page already visited (prevents re-crawling).
allExtLinks = set()
allIntLinks = set()
def getAllExternalLinks(siteUrl):
    """Recursively crawl ``siteUrl``, printing each newly seen external link.

    Discovered links accumulate in the module-level allExtLinks and
    allIntLinks sets; every internal page is crawled at most once.
    """
    response = requests.get(siteUrl)
    domain = f"{urlparse(siteUrl).scheme}://{urlparse(siteUrl).netloc}"
    soup = BeautifulSoup(response.text, 'html.parser')
    for external in getExternalLinks(soup, domain):
        if external not in allExtLinks:
            allExtLinks.add(external)
            print(external)
    for internal in getInternalLinks(soup, domain):
        if internal not in allIntLinks:
            allIntLinks.add(internal)
            getAllExternalLinks(internal)
def followExternalOnly(startingSite):
    """Random-walk the web: hop to one random external link, then repeat.

    Recurses indefinitely (no base case), printing each hop.
    """
    nextSite = getRandomExternalLink(startingSite)
    print(f"Random external link is: {nextSite}")
    followExternalOnly(nextSite)
|
normal
|
{
"blob_id": "5ddfeb49c16a7452c99126f1a837f3c0bed0ec10",
"index": 300,
"step-1": "<mask token>\n\n\ndef getExternalLinks(bs, excludeUrl):\n externalLinks = []\n for link in bs.find_all('a', href=re.compile('^(http|www)((?!' +\n excludeUrl + ').)*$')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in externalLinks:\n externalLinks.append(link.attrs['href'])\n return externalLinks\n\n\n<mask token>\n\n\ndef followExternalOnly(startingSite):\n externalLink = getRandomExternalLink(startingSite)\n print(f'Random external link is: {externalLink}')\n followExternalOnly(externalLink)\n",
"step-2": "<mask token>\n\n\ndef getExternalLinks(bs, excludeUrl):\n externalLinks = []\n for link in bs.find_all('a', href=re.compile('^(http|www)((?!' +\n excludeUrl + ').)*$')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in externalLinks:\n externalLinks.append(link.attrs['href'])\n return externalLinks\n\n\n<mask token>\n\n\ndef getAllExternalLinks(siteUrl):\n html = requests.get(siteUrl)\n domain = f'{urlparse(siteUrl).scheme}://{urlparse(siteUrl).netloc}'\n bs = BeautifulSoup(html.text, 'html.parser')\n internalLinks = getInternalLinks(bs, domain)\n externalLinks = getExternalLinks(bs, domain)\n for link in externalLinks:\n if link not in allExtLinks:\n allExtLinks.add(link)\n print(link)\n for link in internalLinks:\n if link not in allIntLinks:\n allIntLinks.add(link)\n getAllExternalLinks(link)\n\n\ndef followExternalOnly(startingSite):\n externalLink = getRandomExternalLink(startingSite)\n print(f'Random external link is: {externalLink}')\n followExternalOnly(externalLink)\n",
"step-3": "<mask token>\n\n\ndef getInternalLinks(bs, includeUrl):\n includeUrl = (\n f'{urlparse(includeUrl).scheme}://{urlparse(includeUrl).netloc}')\n internalLinks = []\n for link in bs.find_all('a', href=re.compile('^(/|.*' + includeUrl + ')')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in internalLinks:\n if link.attrs['href'].startswith('/'):\n internalLinks.append(includeUrl + link.attrs['href'])\n else:\n internalLinks.append(link.attrs['href'])\n return internalLinks\n\n\ndef getExternalLinks(bs, excludeUrl):\n externalLinks = []\n for link in bs.find_all('a', href=re.compile('^(http|www)((?!' +\n excludeUrl + ').)*$')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in externalLinks:\n externalLinks.append(link.attrs['href'])\n return externalLinks\n\n\n<mask token>\n\n\ndef getAllExternalLinks(siteUrl):\n html = requests.get(siteUrl)\n domain = f'{urlparse(siteUrl).scheme}://{urlparse(siteUrl).netloc}'\n bs = BeautifulSoup(html.text, 'html.parser')\n internalLinks = getInternalLinks(bs, domain)\n externalLinks = getExternalLinks(bs, domain)\n for link in externalLinks:\n if link not in allExtLinks:\n allExtLinks.add(link)\n print(link)\n for link in internalLinks:\n if link not in allIntLinks:\n allIntLinks.add(link)\n getAllExternalLinks(link)\n\n\ndef followExternalOnly(startingSite):\n externalLink = getRandomExternalLink(startingSite)\n print(f'Random external link is: {externalLink}')\n followExternalOnly(externalLink)\n",
"step-4": "<mask token>\n\n\ndef getInternalLinks(bs, includeUrl):\n includeUrl = (\n f'{urlparse(includeUrl).scheme}://{urlparse(includeUrl).netloc}')\n internalLinks = []\n for link in bs.find_all('a', href=re.compile('^(/|.*' + includeUrl + ')')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in internalLinks:\n if link.attrs['href'].startswith('/'):\n internalLinks.append(includeUrl + link.attrs['href'])\n else:\n internalLinks.append(link.attrs['href'])\n return internalLinks\n\n\ndef getExternalLinks(bs, excludeUrl):\n externalLinks = []\n for link in bs.find_all('a', href=re.compile('^(http|www)((?!' +\n excludeUrl + ').)*$')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in externalLinks:\n externalLinks.append(link.attrs['href'])\n return externalLinks\n\n\ndef getRandomExternalLink(startingPage):\n html = requests.get(startingPage)\n bs = BeautifulSoup(html.text, 'html.parser')\n externalLinks = getExternalLinks(bs, urlparse(startingPage).netloc)\n if len(externalLinks) == 0:\n print('No external links, looking around the site for one.')\n domain = (\n f'{urlparse(startingPage).scheme}://{urlparse(startingPage).netloc}'\n )\n internalLinks = getInternalLinks(bs, domain)\n return getRandomExternalLink(internalLinks[random.randint(0, len(\n internalLinks) - 1)])\n else:\n return externalLinks[random.randint(0, len(externalLinks) - 1)]\n\n\n<mask token>\n\n\ndef getAllExternalLinks(siteUrl):\n html = requests.get(siteUrl)\n domain = f'{urlparse(siteUrl).scheme}://{urlparse(siteUrl).netloc}'\n bs = BeautifulSoup(html.text, 'html.parser')\n internalLinks = getInternalLinks(bs, domain)\n externalLinks = getExternalLinks(bs, domain)\n for link in externalLinks:\n if link not in allExtLinks:\n allExtLinks.add(link)\n print(link)\n for link in internalLinks:\n if link not in allIntLinks:\n allIntLinks.add(link)\n getAllExternalLinks(link)\n\n\ndef followExternalOnly(startingSite):\n externalLink = 
getRandomExternalLink(startingSite)\n print(f'Random external link is: {externalLink}')\n followExternalOnly(externalLink)\n",
"step-5": "import requests\nfrom urllib.parse import urlparse\nfrom bs4 import BeautifulSoup\nimport re\nimport datetime\nimport random\n\npages = set()\n\n# Retrieve a list of all Internal links foound on a page.\ndef getInternalLinks(bs, includeUrl):\n includeUrl = f'{urlparse(includeUrl).scheme}://{urlparse(includeUrl).netloc}'\n internalLinks = []\n # Finds all links thhat begin with a \"/\"\n for link in bs.find_all('a',\n href=re.compile('^(/|.*'+includeUrl+')')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in internalLinks:\n if link.attrs['href'].startswith('/'):\n internalLinks.append(includeUrl+link.attrs['href'])\n else:\n internalLinks.append(link.attrs['href'])\n return internalLinks\n\n# Retrieves a list of all external links found on a pagee.\ndef getExternalLinks(bs, excludeUrl):\n externalLinks = []\n # Finds all links that starts with \"http\" that do\n # not contain the current URL\n for link in bs.find_all('a',\n href=re.compile('^(http|www)((?!'+excludeUrl+').)*$')):\n if link.attrs['href'] is not None:\n if link.attrs['href'] not in externalLinks:\n externalLinks.append(link.attrs['href'])\n return externalLinks\n\ndef getRandomExternalLink(startingPage):\n html = requests.get(startingPage)\n bs = BeautifulSoup(html.text, 'html.parser')\n externalLinks = getExternalLinks(bs, \n urlparse(startingPage).netloc)\n if len(externalLinks) == 0:\n print('No external links, looking around the site for one.')\n domain = f'{urlparse(startingPage).scheme}://{urlparse(startingPage).netloc}'\n internalLinks = getInternalLinks(bs, domain)\n return getRandomExternalLink(internalLinks[random.randint(0, len(internalLinks)-1)])\n else:\n return externalLinks[random.randint(0, len(externalLinks)-1)]\n\n# Collects a list of all external URLs found on the site\nallExtLinks = set()\nallIntLinks = set()\n\ndef getAllExternalLinks(siteUrl):\n html = requests.get(siteUrl)\n domain = f\"{urlparse(siteUrl).scheme}://{urlparse(siteUrl).netloc}\"\n bs 
= BeautifulSoup(html.text, 'html.parser')\n internalLinks = getInternalLinks(bs, domain)\n externalLinks = getExternalLinks(bs, domain)\n for link in externalLinks:\n if link not in allExtLinks:\n allExtLinks.add(link)\n print(link)\n for link in internalLinks:\n if link not in allIntLinks:\n allIntLinks.add(link)\n getAllExternalLinks(link)\n\n\ndef followExternalOnly(startingSite):\n externalLink = getRandomExternalLink(startingSite)\n print(f\"Random external link is: {externalLink}\")\n followExternalOnly(externalLink)\n\n\n",
"step-ids": [
2,
3,
4,
5,
8
]
}
|
[
2,
3,
4,
5,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
np.random.seed(1)
<|reserved_special_token_0|>
K.set_image_dim_ordering('th')
<|reserved_special_token_0|>
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
assert os.path.exists(weights_path
), "Model weights not found (see 'weights_path' variable in script)."
<|reserved_special_token_0|>
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print('Model loaded.\n')
<|reserved_special_token_0|>
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
top_model.load_weights(top_model_weights_path)
model.add(top_model)
print('Final Model Assembled.\n')
<|reserved_special_token_0|>
img.load()
<|reserved_special_token_0|>
print(data.shape)
<|reserved_special_token_0|>
print('Prediction begins.\n')
<|reserved_special_token_0|>
print(output)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
os.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'
<|reserved_special_token_0|>
np.random.seed(1)
<|reserved_special_token_0|>
K.set_image_dim_ordering('th')
weights_path = 'E:\\Interesting\\Code Fun Do 2017\\vgg16_weights.h5'
top_model_weights_path = (
'E:\\Interesting\\Code Fun Do 2017\\bottleneck_fc_model.h5')
validation_data_dir = (
'E:\\Interesting\\Code Fun Do 2017\\Trial\\cercospora_leaf_spot_365.jpg')
img_width = 200
img_height = 200
input_shape = 3, img_height, img_width
batch_size = 32
nb_classes = 4
nb_epoch = 3
nb_train_samples = 50
nb_validation_samples = 25
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
assert os.path.exists(weights_path
), "Model weights not found (see 'weights_path' variable in script)."
f = h5py.File(weights_path)
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print('Model loaded.\n')
top_model = Sequential()
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
top_model.load_weights(top_model_weights_path)
model.add(top_model)
print('Final Model Assembled.\n')
img = Image.open(validation_data_dir)
img.load()
data = np.asarray(img, dtype='int32')
print(data.shape)
data = data.reshape(1, 3, 200, 200)
print('Prediction begins.\n')
output = model.predict_classes(data, batch_size=32, verbose=1)
print(output)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import os
os.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'
import theano
import numpy as np
np.random.seed(1)
import pandas as pd
import h5py
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from PIL import Image
K.set_image_dim_ordering('th')
weights_path = 'E:\\Interesting\\Code Fun Do 2017\\vgg16_weights.h5'
top_model_weights_path = (
'E:\\Interesting\\Code Fun Do 2017\\bottleneck_fc_model.h5')
validation_data_dir = (
'E:\\Interesting\\Code Fun Do 2017\\Trial\\cercospora_leaf_spot_365.jpg')
img_width = 200
img_height = 200
input_shape = 3, img_height, img_width
batch_size = 32
nb_classes = 4
nb_epoch = 3
nb_train_samples = 50
nb_validation_samples = 25
model = Sequential()
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
assert os.path.exists(weights_path
), "Model weights not found (see 'weights_path' variable in script)."
f = h5py.File(weights_path)
for k in range(f.attrs['nb_layers']):
if k >= len(model.layers):
break
g = f['layer_{}'.format(k)]
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
model.layers[k].set_weights(weights)
f.close()
print('Model loaded.\n')
top_model = Sequential()
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
top_model.load_weights(top_model_weights_path)
model.add(top_model)
print('Final Model Assembled.\n')
img = Image.open(validation_data_dir)
img.load()
data = np.asarray(img, dtype='int32')
print(data.shape)
data = data.reshape(1, 3, 200, 200)
print('Prediction begins.\n')
output = model.predict_classes(data, batch_size=32, verbose=1)
print(output)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 5 02:39:55 2017
@author: sparsh
"""
"""
Crop Disease Classification Project for Code Fun Do 2017 - IIT Roorkee
"""
"""
File for predicting a test image.
"""
import os
os.environ['THEANO_FLAGS'] = "device=gpu1, floatX=float32"
import theano
import numpy as np
np.random.seed(1)
import pandas as pd
import h5py
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
from PIL import Image
# Theano-style channels-first ordering: tensors are (channels, height, width).
K.set_image_dim_ordering('th')
# Paths to the pre-trained VGG16 convolutional weights and the fine-tuned
# top-classifier weights (both local HDF5 files).
weights_path = "E:\\Interesting\\Code Fun Do 2017\\vgg16_weights.h5"
top_model_weights_path = "E:\\Interesting\\Code Fun Do 2017\\bottleneck_fc_model.h5"
# Image to classify. NOTE(review): despite the name, this points at a single
# file, not a directory — the directory variant is kept commented below.
validation_data_dir = "E:\\Interesting\\Code Fun Do 2017\\Trial\\cercospora_leaf_spot_365.jpg"
#validation_data_dir = "E:\\Interesting\\Code Fun Do 2017\\Trial"
# Input image dimensions (channels-first to match the 'th' ordering above).
img_width = 200
img_height = 200
input_shape = (3, img_height, img_width)  # NOTE(review): unused — the first layer below hard-codes its own shape
# Model parameters.
batch_size = 32
nb_classes = 4  # number of disease classes predicted by the top classifier
nb_epoch = 3  # NOTE(review): unused in this prediction-only script
nb_train_samples = 50  # NOTE(review): unused in this prediction-only script
nb_validation_samples = 25  # NOTE(review): only referenced by the commented-out batch path below
# Build the VGG16 convolutional base layer by layer: five convolution blocks
# (64/128/256/512/512 filters, 3x3 kernels, ReLU), each block followed by
# 2x2 max pooling. Input is channels-first, shape (3, img_width, img_height).
model = Sequential()
# Block 1: two 64-filter convolutions.
model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
# Block 2: two 128-filter convolutions.
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
# Block 3: three 256-filter convolutions.
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
# Block 4: three 512-filter convolutions.
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
# Block 5: three 512-filter convolutions.
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
model.add(ZeroPadding2D((1, 1)))
model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
# Load the weights of the VGG16 network
# (trained on ImageNet, won the ILSVRC competition in 2014).
# Copied layer by layer from the HDF5 file because the savefile also holds
# the original fully-connected layers, which this model does not define;
# with an exact architecture match, model.load_weights(filename) would do.
# NOTE(review): assert is stripped under `python -O` — raising an explicit
# error would be safer for this precondition.
assert os.path.exists(weights_path), "Model weights not found (see 'weights_path' variable in script)."
f = h5py.File(weights_path)
for k in range(f.attrs['nb_layers']):
    if k >= len(model.layers):
        # we don't look at the last (fully-connected) layers in the savefile
        break
    g = f['layer_{}'.format(k)]
    weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
    model.layers[k].set_weights(weights)
f.close()
print("Model loaded.\n")
# Build the small classifier that sits on top of the convolutional base:
# flatten -> 256-unit ReLU layer -> 50% dropout -> softmax over the classes.
top_model = Sequential()
top_model.add(Flatten(input_shape=model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(nb_classes, activation='softmax'))
# Note that it is necessary to start with a fully-trained classifier,
# including the top classifier, in order to successfully do fine-tuning;
# these weights come from the earlier bottleneck-feature training run.
top_model.load_weights(top_model_weights_path)
# Stack the classifier on top of the convolutional base.
model.add(top_model)
print("Final Model Assembled.\n")
# NOTE(review): dead commented-out batch-prediction code was removed here
# (ImageDataGenerator.flow_from_directory + predict_generator + np.save of
# bottleneck features); this script classifies only the single image at
# validation_data_dir.
# Load the test image and decode it eagerly into memory.
img = Image.open(validation_data_dir)
img.load()
data = np.asarray(img, dtype="int32")
print(data.shape)
# Form a batch of one channels-first image: (1, 3, 200, 200).
# NOTE(review): PIL yields (height, width, channels) arrays; reshape reorders
# the flat buffer rather than transposing axes — confirm this matches how
# training images were fed (a transpose may have been intended).
data = data.reshape(1, 3, 200, 200)
print("Prediction begins.\n")
# Predict the class index for the single-image batch.
output = model.predict_classes(data, batch_size=32, verbose=1)
print(output)
|
flexible
|
{
"blob_id": "96210942b01c510300120913bed1bc6d497a39a9",
"index": 1945,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.random.seed(1)\n<mask token>\nK.set_image_dim_ordering('th')\n<mask token>\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nassert os.path.exists(weights_path\n ), \"Model weights not found (see 'weights_path' variable in script).\"\n<mask token>\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n break\n g = f['layer_{}'.format(k)]\n 
weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint('Model loaded.\\n')\n<mask token>\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\ntop_model.load_weights(top_model_weights_path)\nmodel.add(top_model)\nprint('Final Model Assembled.\\n')\n<mask token>\nimg.load()\n<mask token>\nprint(data.shape)\n<mask token>\nprint('Prediction begins.\\n')\n<mask token>\nprint(output)\n",
"step-3": "<mask token>\nos.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'\n<mask token>\nnp.random.seed(1)\n<mask token>\nK.set_image_dim_ordering('th')\nweights_path = 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\vgg16_weights.h5'\ntop_model_weights_path = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\bottleneck_fc_model.h5')\nvalidation_data_dir = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\\\\cercospora_leaf_spot_365.jpg')\nimg_width = 200\nimg_height = 200\ninput_shape = 3, img_height, img_width\nbatch_size = 32\nnb_classes = 4\nnb_epoch = 3\nnb_train_samples = 50\nnb_validation_samples = 25\nmodel = Sequential()\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 
3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nassert os.path.exists(weights_path\n ), \"Model weights not found (see 'weights_path' variable in script).\"\nf = h5py.File(weights_path)\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n break\n g = f['layer_{}'.format(k)]\n weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint('Model loaded.\\n')\ntop_model = Sequential()\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\ntop_model.load_weights(top_model_weights_path)\nmodel.add(top_model)\nprint('Final Model Assembled.\\n')\nimg = Image.open(validation_data_dir)\nimg.load()\ndata = np.asarray(img, dtype='int32')\nprint(data.shape)\ndata = data.reshape(1, 3, 200, 200)\nprint('Prediction begins.\\n')\noutput = model.predict_classes(data, batch_size=32, verbose=1)\nprint(output)\n",
"step-4": "<mask token>\nimport os\nos.environ['THEANO_FLAGS'] = 'device=gpu1, floatX=float32'\nimport theano\nimport numpy as np\nnp.random.seed(1)\nimport pandas as pd\nimport h5py\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.models import Sequential\nfrom keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D\nfrom keras.layers import Activation, Dropout, Flatten, Dense\nfrom keras import backend as K\nfrom PIL import Image\nK.set_image_dim_ordering('th')\nweights_path = 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\vgg16_weights.h5'\ntop_model_weights_path = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\bottleneck_fc_model.h5')\nvalidation_data_dir = (\n 'E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\\\\cercospora_leaf_spot_365.jpg')\nimg_width = 200\nimg_height = 200\ninput_shape = 3, img_height, img_width\nbatch_size = 32\nnb_classes = 4\nnb_epoch = 3\nnb_train_samples = 50\nnb_validation_samples = 25\nmodel = Sequential()\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', 
name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\nassert os.path.exists(weights_path\n ), \"Model weights not found (see 'weights_path' variable in script).\"\nf = h5py.File(weights_path)\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n break\n g = f['layer_{}'.format(k)]\n weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint('Model loaded.\\n')\ntop_model = Sequential()\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\ntop_model.load_weights(top_model_weights_path)\nmodel.add(top_model)\nprint('Final Model Assembled.\\n')\nimg = Image.open(validation_data_dir)\nimg.load()\ndata = np.asarray(img, dtype='int32')\nprint(data.shape)\ndata = data.reshape(1, 3, 200, 200)\nprint('Prediction begins.\\n')\noutput = model.predict_classes(data, batch_size=32, verbose=1)\nprint(output)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Mar 5 02:39:55 2017\n\n@author: sparsh\n\"\"\"\n\n\"\"\"\nCrop Disease Classification Project for Code Fun Do 2017 - IIT Roorkee\n\"\"\"\n\n\"\"\"\nFile for predicting a test image.\n\"\"\"\n\nimport os\nos.environ['THEANO_FLAGS'] = \"device=gpu1, floatX=float32\"\nimport theano\nimport numpy as np\nnp.random.seed(1)\n\nimport pandas as pd\nimport h5py\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.models import Sequential\nfrom keras.layers import Convolution2D, MaxPooling2D, ZeroPadding2D\nfrom keras.layers import Activation, Dropout, Flatten, Dense\nfrom keras import backend as K\nfrom PIL import Image\nK.set_image_dim_ordering('th')\n\n#Path to model weights file\nweights_path = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\vgg16_weights.h5\"\ntop_model_weights_path = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\bottleneck_fc_model.h5\"\n\n#Unknown Image Location\nvalidation_data_dir = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\\\\cercospora_leaf_spot_365.jpg\"\n#validation_data_dir = \"E:\\\\Interesting\\\\Code Fun Do 2017\\\\Trial\"\n\n#input image dimensions\nimg_width = 200\nimg_height = 200\ninput_shape = (3, img_height, img_width)\n\n#Model parameters\nbatch_size = 32\nnb_classes = 4\nnb_epoch = 3\nnb_train_samples = 50\nnb_validation_samples = 25\n\n# build the VGG16 network\nmodel = Sequential()\nmodel.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))\n\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 
2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))\nmodel.add(ZeroPadding2D((1, 1)))\nmodel.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))\nmodel.add(MaxPooling2D((2, 2), strides=(2, 2)))\n\n# load the weights of the VGG16 networks\n# (trained on ImageNet, won the ILSVRC competition in 2014)\n# note: when there is a complete match between your model definition\n# and your weight savefile, you can simply call model.load_weights(filename)\nassert os.path.exists(weights_path), \"Model weights not found (see 'weights_path' variable in script).\"\nf = h5py.File(weights_path)\nfor k in range(f.attrs['nb_layers']):\n if k >= len(model.layers):\n # we don't look at the last (fully-connected) layers in the savefile\n break\n g = f['layer_{}'.format(k)]\n weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]\n model.layers[k].set_weights(weights)\nf.close()\nprint(\"Model loaded.\\n\")\n\n# build a classifier model to put on top of the convolutional model\ntop_model = 
Sequential()\ntop_model.add(Flatten(input_shape=model.output_shape[1:]))\ntop_model.add(Dense(256, activation='relu'))\ntop_model.add(Dropout(0.5))\ntop_model.add(Dense(nb_classes, activation='softmax'))\n\n# note that it is necessary to start with a fully-trained\n# classifier, including the top classifier,\n# in order to successfully do fine-tuning\ntop_model.load_weights(top_model_weights_path)\n\n# add the model on top of the convolutional base\nmodel.add(top_model)\n#print(\"DC.\\n\")\nprint(\"Final Model Assembled.\\n\")\n\n#datagen = ImageDataGenerator(rescale=1./255)\n#generator = datagen.flow_from_directory(\n# validation_data_dir,\n# target_size=(img_width, img_height),\n# batch_size=32,\n# class_mode=None,\n# shuffle=False)\n#bottleneck_features_validation = model.predict_generator(generator, nb_validation_samples)\n#np.save(open('bottleneck_features_validation.npy', 'w'), bottleneck_features_validation)\n#print(\"Testing features stored.\\n\")\n\n#data = np.load(open('bottleneck_features_validation.npy'))\nimg = Image.open(validation_data_dir)\n\nimg.load()\n#print(\"chutiya.\\n\")\ndata = np.asarray(img, dtype=\"int32\")\n#print(\"harami.\\n\")\nprint(data.shape)\ndata = data.reshape(1, 3, 200, 200)\nprint(\"Prediction begins.\\n\")\noutput = model.predict_classes(data, batch_size=32, verbose=1)\nprint(output)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.shortcuts import render, redirect, get_object_or_404
from .models import Article, Comment
# from IPython import embed
# Create your views here.
def article_list(request):
    """Render the board index listing every article."""
    context = {'articles': Article.objects.all()}
    return render(request, 'board/list.html', context)
def article_detail(request, article_id):
    """Render one article together with its comments; 404 on unknown id."""
    article = get_object_or_404(Article, id=article_id)
    context = {
        'article': article,
        'comments': article.comment_set.all(),
    }
    return render(request, 'board/detail.html', context)
# def new_article(request):
# return render(request, 'board/new.html')
def create_article(request):
    """GET: show the new-article form; otherwise create an article from POST data."""
    if request.method == 'GET':
        return render(request, 'board/new.html')
    # Any non-GET request is treated as a submission, as before.
    new_article = Article(
        title=request.POST.get('title'),
        content=request.POST.get('content'),
    )
    new_article.save()
    return redirect('board:article_detail', new_article.id)
# def edit_article(request, article_id):
# pass
def update_article(request, article_id):
    """GET: show the edit form for an article. Otherwise: apply the edits.

    404s when ``article_id`` does not exist. The lookup used to be
    duplicated in both branches; it is now done once up front.
    """
    article = get_object_or_404(Article, id=article_id)
    if request.method == 'GET':
        return render(request, 'board/edit.html', {
            'article': article,
        })
    # Submission: overwrite the stored fields with the posted values.
    article.title = request.POST.get('title')
    article.content = request.POST.get('content')
    article.save()
    return redirect('board:article_detail', article.id)
def delete_article(request, article_id):
    """POST: delete the article, then return to the article list.

    Non-POST requests delete nothing. Previously they fell through and
    returned None, which Django rejects (ValueError: view didn't return
    an HttpResponse); they now simply redirect to the list.
    """
    if request.method == 'POST':
        get_object_or_404(Article, id=article_id).delete()
    return redirect('board:article_list')
def create_comment(request, article_id):
    """POST: attach a new comment to the article, then show the article.

    The target article must exist (404 otherwise). Non-POST requests
    previously fell through and returned None, which Django rejects;
    they now redirect to the article without creating anything.
    """
    if request.method == 'POST':
        comment = Comment()
        comment.article = get_object_or_404(Article, id=article_id)
        comment.content = request.POST.get('comment')
        comment.save()
    return redirect('board:article_detail', article_id)
def delete_comment(request, article_id, comment_id):
    """POST: delete one comment, then show its article.

    404s when ``comment_id`` does not exist. Non-POST requests previously
    fell through and returned None, which Django rejects; they now just
    redirect back to the article.
    """
    if request.method == 'POST':
        get_object_or_404(Comment, id=comment_id).delete()
    return redirect('board:article_detail', article_id)
|
normal
|
{
"blob_id": "6946601050802aaaa559d25612d0d4f5116559eb",
"index": 1845,
"step-1": "<mask token>\n\n\ndef article_list(request):\n articles = Article.objects.all()\n return render(request, 'board/list.html', {'articles': articles})\n\n\ndef article_detail(request, article_id):\n article = get_object_or_404(Article, id=article_id)\n comments = article.comment_set.all()\n return render(request, 'board/detail.html', {'article': article,\n 'comments': comments})\n\n\n<mask token>\n\n\ndef update_article(request, article_id):\n if request.method == 'GET':\n article = get_object_or_404(Article, id=article_id)\n return render(request, 'board/edit.html', {'article': article})\n else:\n article = get_object_or_404(Article, id=article_id)\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\n<mask token>\n\n\ndef create_comment(request, article_id):\n if request.method == 'POST':\n comment = Comment()\n comment.article = get_object_or_404(Article, id=article_id)\n comment.content = request.POST.get('comment')\n comment.save()\n return redirect('board:article_detail', article_id)\n\n\ndef delete_comment(request, article_id, comment_id):\n if request.method == 'POST':\n comment = get_object_or_404(Comment, id=comment_id)\n comment.delete()\n return redirect('board:article_detail', article_id)\n",
"step-2": "<mask token>\n\n\ndef article_list(request):\n articles = Article.objects.all()\n return render(request, 'board/list.html', {'articles': articles})\n\n\ndef article_detail(request, article_id):\n article = get_object_or_404(Article, id=article_id)\n comments = article.comment_set.all()\n return render(request, 'board/detail.html', {'article': article,\n 'comments': comments})\n\n\ndef create_article(request):\n if request.method == 'GET':\n return render(request, 'board/new.html')\n else:\n article = Article()\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\ndef update_article(request, article_id):\n if request.method == 'GET':\n article = get_object_or_404(Article, id=article_id)\n return render(request, 'board/edit.html', {'article': article})\n else:\n article = get_object_or_404(Article, id=article_id)\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\n<mask token>\n\n\ndef create_comment(request, article_id):\n if request.method == 'POST':\n comment = Comment()\n comment.article = get_object_or_404(Article, id=article_id)\n comment.content = request.POST.get('comment')\n comment.save()\n return redirect('board:article_detail', article_id)\n\n\ndef delete_comment(request, article_id, comment_id):\n if request.method == 'POST':\n comment = get_object_or_404(Comment, id=comment_id)\n comment.delete()\n return redirect('board:article_detail', article_id)\n",
"step-3": "<mask token>\n\n\ndef article_list(request):\n articles = Article.objects.all()\n return render(request, 'board/list.html', {'articles': articles})\n\n\ndef article_detail(request, article_id):\n article = get_object_or_404(Article, id=article_id)\n comments = article.comment_set.all()\n return render(request, 'board/detail.html', {'article': article,\n 'comments': comments})\n\n\ndef create_article(request):\n if request.method == 'GET':\n return render(request, 'board/new.html')\n else:\n article = Article()\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\ndef update_article(request, article_id):\n if request.method == 'GET':\n article = get_object_or_404(Article, id=article_id)\n return render(request, 'board/edit.html', {'article': article})\n else:\n article = get_object_or_404(Article, id=article_id)\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\ndef delete_article(request, article_id):\n if request.method == 'POST':\n article = get_object_or_404(Article, id=article_id)\n article.delete()\n return redirect('board:article_list')\n\n\ndef create_comment(request, article_id):\n if request.method == 'POST':\n comment = Comment()\n comment.article = get_object_or_404(Article, id=article_id)\n comment.content = request.POST.get('comment')\n comment.save()\n return redirect('board:article_detail', article_id)\n\n\ndef delete_comment(request, article_id, comment_id):\n if request.method == 'POST':\n comment = get_object_or_404(Comment, id=comment_id)\n comment.delete()\n return redirect('board:article_detail', article_id)\n",
"step-4": "from django.shortcuts import render, redirect, get_object_or_404\nfrom .models import Article, Comment\n\n\ndef article_list(request):\n articles = Article.objects.all()\n return render(request, 'board/list.html', {'articles': articles})\n\n\ndef article_detail(request, article_id):\n article = get_object_or_404(Article, id=article_id)\n comments = article.comment_set.all()\n return render(request, 'board/detail.html', {'article': article,\n 'comments': comments})\n\n\ndef create_article(request):\n if request.method == 'GET':\n return render(request, 'board/new.html')\n else:\n article = Article()\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\ndef update_article(request, article_id):\n if request.method == 'GET':\n article = get_object_or_404(Article, id=article_id)\n return render(request, 'board/edit.html', {'article': article})\n else:\n article = get_object_or_404(Article, id=article_id)\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\ndef delete_article(request, article_id):\n if request.method == 'POST':\n article = get_object_or_404(Article, id=article_id)\n article.delete()\n return redirect('board:article_list')\n\n\ndef create_comment(request, article_id):\n if request.method == 'POST':\n comment = Comment()\n comment.article = get_object_or_404(Article, id=article_id)\n comment.content = request.POST.get('comment')\n comment.save()\n return redirect('board:article_detail', article_id)\n\n\ndef delete_comment(request, article_id, comment_id):\n if request.method == 'POST':\n comment = get_object_or_404(Comment, id=comment_id)\n comment.delete()\n return redirect('board:article_detail', article_id)\n",
"step-5": "from django.shortcuts import render, redirect, get_object_or_404\nfrom .models import Article, Comment\n# from IPython import embed\n\n# Create your views here.\n\n\ndef article_list(request):\n articles = Article.objects.all()\n return render(request, 'board/list.html', {\n 'articles': articles,\n })\n\n\ndef article_detail(request, article_id):\n article = get_object_or_404(Article, id=article_id)\n comments = article.comment_set.all()\n return render(request, 'board/detail.html', {\n 'article': article,\n 'comments': comments,\n })\n\n\n# def new_article(request):\n# return render(request, 'board/new.html')\n\n\ndef create_article(request):\n if request.method == 'GET':\n return render(request, 'board/new.html')\n else: # request.method == 'POST'\n article = Article()\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\n# def edit_article(request, article_id):\n# pass\n\n\ndef update_article(request, article_id):\n if request.method == 'GET':\n article = get_object_or_404(Article, id=article_id)\n return render(request, 'board/edit.html', {\n 'article': article,\n })\n\n else: # request.method == 'POST'\n article = get_object_or_404(Article, id=article_id)\n article.title = request.POST.get('title')\n article.content = request.POST.get('content')\n article.save()\n return redirect('board:article_detail', article.id)\n\n\ndef delete_article(request, article_id):\n if request.method == 'POST':\n article = get_object_or_404(Article, id=article_id)\n article.delete()\n return redirect('board:article_list')\n\n\ndef create_comment(request, article_id):\n if request.method == 'POST':\n comment = Comment()\n comment.article = get_object_or_404(Article, id=article_id)\n comment.content = request.POST.get('comment')\n\n comment.save()\n return redirect('board:article_detail', article_id)\n\n\ndef delete_comment(request, article_id, comment_id):\n 
if request.method == 'POST':\n comment = get_object_or_404(Comment, id=comment_id)\n comment.delete()\n\n return redirect('board:article_detail', article_id)\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
from django.db import models
# Create your models here.
class UserInfo(models.Model):
    """A registered user account with ban and soft-delete status flags."""

    uname = models.CharField('用户名', max_length=50, null=False)  # username (login name)
    upassword = models.CharField('密码', max_length=200, null=False)  # password; 200 chars suggests a hash is stored — TODO confirm
    email = models.CharField('邮箱', max_length=50, null=True)  # email address, optional
    phone = models.CharField('手机号', max_length=20, null=False)  # mobile phone number
    time = models.DateTimeField('注册时间', auto_now=True)  # NOTE(review): auto_now refreshes on every save; a registration timestamp usually wants auto_now_add — confirm intent
    isban = models.BooleanField('禁用', default=False)  # account disabled flag
    isdelete = models.BooleanField('删除', default=False)  # soft-delete flag

    def __str__(self):
        # Identify the row by its username (shown in the admin / shell).
        return self.uname

    class Meta:
        verbose_name = '用户'
        verbose_name_plural = verbose_name
class Address(models.Model):
    """A shipping address belonging to one UserInfo account."""

    aname = models.CharField('收货人', max_length=50, null=False)  # recipient name
    ads = models.CharField('地址', max_length=300, null=False)  # street address
    phone = models.CharField('电话', max_length=20, null=False)  # contact phone number
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0
    # (on_delete became mandatory in 2.0) — confirm the project's version.
    user = models.ForeignKey(UserInfo)

    def __str__(self):
        # Identify the address by its recipient name.
        return self.aname

    class Meta:
        verbose_name = '收货地址'
        verbose_name_plural = verbose_name
|
normal
|
{
"blob_id": "dbec74ecf488ca98f3f441e252f79bc2bc0959c1",
"index": 4068,
"step-1": "<mask token>\n\n\nclass UserInfo(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n verbose_name = '用户'\n verbose_name_plural = verbose_name\n\n\nclass Address(models.Model):\n aname = models.CharField('收货人', max_length=50, null=False)\n ads = models.CharField('地址', max_length=300, null=False)\n phone = models.CharField('电话', max_length=20, null=False)\n user = models.ForeignKey(UserInfo)\n\n def __str__(self):\n return self.aname\n\n\n class Meta:\n verbose_name = '收货地址'\n verbose_name_plural = verbose_name\n",
"step-2": "<mask token>\n\n\nclass UserInfo(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.uname\n\n\n class Meta:\n verbose_name = '用户'\n verbose_name_plural = verbose_name\n\n\nclass Address(models.Model):\n aname = models.CharField('收货人', max_length=50, null=False)\n ads = models.CharField('地址', max_length=300, null=False)\n phone = models.CharField('电话', max_length=20, null=False)\n user = models.ForeignKey(UserInfo)\n\n def __str__(self):\n return self.aname\n\n\n class Meta:\n verbose_name = '收货地址'\n verbose_name_plural = verbose_name\n",
"step-3": "<mask token>\n\n\nclass UserInfo(models.Model):\n uname = models.CharField('用户名', max_length=50, null=False)\n upassword = models.CharField('密码', max_length=200, null=False)\n email = models.CharField('邮箱', max_length=50, null=True)\n phone = models.CharField('手机号', max_length=20, null=False)\n time = models.DateTimeField('注册时间', auto_now=True)\n isban = models.BooleanField('禁用', default=False)\n isdelete = models.BooleanField('删除', default=False)\n\n def __str__(self):\n return self.uname\n\n\n class Meta:\n verbose_name = '用户'\n verbose_name_plural = verbose_name\n\n\nclass Address(models.Model):\n aname = models.CharField('收货人', max_length=50, null=False)\n ads = models.CharField('地址', max_length=300, null=False)\n phone = models.CharField('电话', max_length=20, null=False)\n user = models.ForeignKey(UserInfo)\n\n def __str__(self):\n return self.aname\n\n\n class Meta:\n verbose_name = '收货地址'\n verbose_name_plural = verbose_name\n",
"step-4": "from django.db import models\n\n\nclass UserInfo(models.Model):\n uname = models.CharField('用户名', max_length=50, null=False)\n upassword = models.CharField('密码', max_length=200, null=False)\n email = models.CharField('邮箱', max_length=50, null=True)\n phone = models.CharField('手机号', max_length=20, null=False)\n time = models.DateTimeField('注册时间', auto_now=True)\n isban = models.BooleanField('禁用', default=False)\n isdelete = models.BooleanField('删除', default=False)\n\n def __str__(self):\n return self.uname\n\n\n class Meta:\n verbose_name = '用户'\n verbose_name_plural = verbose_name\n\n\nclass Address(models.Model):\n aname = models.CharField('收货人', max_length=50, null=False)\n ads = models.CharField('地址', max_length=300, null=False)\n phone = models.CharField('电话', max_length=20, null=False)\n user = models.ForeignKey(UserInfo)\n\n def __str__(self):\n return self.aname\n\n\n class Meta:\n verbose_name = '收货地址'\n verbose_name_plural = verbose_name\n",
"step-5": "from django.db import models\n\n# Create your models here.\nclass UserInfo(models.Model):\n uname = models.CharField('用户名', max_length=50, null=False)\n upassword = models.CharField('密码', max_length=200, null=False)\n email = models.CharField('邮箱', max_length=50, null=True)\n phone = models.CharField('手机号', max_length=20, null=False)\n time = models.DateTimeField('注册时间', auto_now=True)\n isban = models.BooleanField('禁用', default=False)\n isdelete = models.BooleanField('删除', default=False)\n\n def __str__(self):\n return self.uname\n\n class Meta:\n verbose_name = '用户'\n verbose_name_plural = verbose_name\n\n\nclass Address(models.Model):\n aname = models.CharField('收货人', max_length=50, null=False)\n ads = models.CharField('地址', max_length=300, null=False)\n phone = models.CharField('电话', max_length=20, null=False)\n user = models.ForeignKey(UserInfo)\n\n def __str__(self):\n return self.aname\n\n class Meta:\n verbose_name = '收货地址'\n verbose_name_plural = verbose_name\n\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument('-sd', '--startdate', help=
'Date to start scheduling trials, format is MM/DD.', required=True)
ap.add_argument('-r', '--round', help='A number.', required=True)
ap.add_argument('-hs', '--hsched', help=
'Which high schedule to use (e.g. H1, H2, H3)', required=True)
ap.add_argument('-ls', '--lsched', help=
'Which low schedule to use (e.g. H1, H2, H3)', required=True)
ap.add_argument('-h1', '--hfish1', help=
'1st Fish that will be assigned H schedule', required=True)
ap.add_argument('-h2', '--hfish2', help=
'2nd Fish that will be assigned H schedule', required=True)
ap.add_argument('-h3', '--hfish3', help=
'3rd Fish that will be assigned H schedule', required=True)
ap.add_argument('-l1', '--lfish1', help=
'1st Fish that will be assigned L schedule', required=True)
ap.add_argument('-l2', '--lfish2', help=
'2nd Fish that will be assigned L schedule', required=True)
ap.add_argument('-l3', '--lfish3', help=
'3rd Fish that will be assigned L schedule', required=True)
args = vars(ap.parse_args())
a_dict = {'startDate': args['startdate'], 'round': args['round'],
'h_schedule': args['hsched'], 'l_schedule': args['lsched'],
'mapping': {'H': {'fish1': args['hfish1'], 'fish2': args['hfish2'],
'fish3': args['hfish3']}, 'L': {'fish1': args['lfish1'], 'fish2':
args['lfish2'], 'fish3': args['lfish3']}}}
os.remove('top.json')
with open('top.json', 'w') as f:
json.dump(a_dict, f, sort_keys=True, indent=4, separators=(',', ': '))
sys.exit(0)
<|reserved_special_token_1|>
import json
import argparse
import sys
import os
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument('-sd', '--startdate', help=
'Date to start scheduling trials, format is MM/DD.', required=True)
ap.add_argument('-r', '--round', help='A number.', required=True)
ap.add_argument('-hs', '--hsched', help=
'Which high schedule to use (e.g. H1, H2, H3)', required=True)
ap.add_argument('-ls', '--lsched', help=
'Which low schedule to use (e.g. H1, H2, H3)', required=True)
ap.add_argument('-h1', '--hfish1', help=
'1st Fish that will be assigned H schedule', required=True)
ap.add_argument('-h2', '--hfish2', help=
'2nd Fish that will be assigned H schedule', required=True)
ap.add_argument('-h3', '--hfish3', help=
'3rd Fish that will be assigned H schedule', required=True)
ap.add_argument('-l1', '--lfish1', help=
'1st Fish that will be assigned L schedule', required=True)
ap.add_argument('-l2', '--lfish2', help=
'2nd Fish that will be assigned L schedule', required=True)
ap.add_argument('-l3', '--lfish3', help=
'3rd Fish that will be assigned L schedule', required=True)
args = vars(ap.parse_args())
a_dict = {'startDate': args['startdate'], 'round': args['round'],
'h_schedule': args['hsched'], 'l_schedule': args['lsched'],
'mapping': {'H': {'fish1': args['hfish1'], 'fish2': args['hfish2'],
'fish3': args['hfish3']}, 'L': {'fish1': args['lfish1'], 'fish2':
args['lfish2'], 'fish3': args['lfish3']}}}
os.remove('top.json')
with open('top.json', 'w') as f:
json.dump(a_dict, f, sort_keys=True, indent=4, separators=(',', ': '))
sys.exit(0)
<|reserved_special_token_1|>
import json
import argparse
import sys
import os
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument("-sd","--startdate", help="Date to start scheduling trials, format is MM/DD.", required=True)
ap.add_argument("-r", "--round",help="A number.", required=True)
ap.add_argument("-hs", "--hsched", help="Which high schedule to use (e.g. H1, H2, H3)", required=True)
ap.add_argument("-ls", "--lsched", help="Which low schedule to use (e.g. H1, H2, H3)", required=True)
ap.add_argument("-h1", "--hfish1", help="1st Fish that will be assigned H schedule", required=True)
ap.add_argument("-h2", "--hfish2", help="2nd Fish that will be assigned H schedule", required=True)
ap.add_argument("-h3", "--hfish3", help="3rd Fish that will be assigned H schedule", required=True)
ap.add_argument("-l1", "--lfish1", help="1st Fish that will be assigned L schedule", required=True)
ap.add_argument("-l2", "--lfish2", help="2nd Fish that will be assigned L schedule", required=True)
ap.add_argument("-l3", "--lfish3", help="3rd Fish that will be assigned L schedule", required=True)
args = vars(ap.parse_args())
a_dict = {"startDate": args["startdate"], "round": args["round"], "h_schedule": args["hsched"], "l_schedule": args["lsched"], "mapping": {"H": { "fish1" : args["hfish1"], "fish2": args["hfish2"], "fish3": args["hfish3"]}, "L": { "fish1" : args["lfish1"], "fish2": args["lfish2"], "fish3": args["lfish3"]}}}
#print a_dict
os.remove('top.json')
with open('top.json', 'w') as f:
json.dump(a_dict, f, sort_keys=True, indent=4, separators=(',', ': '))
sys.exit(0)
|
flexible
|
{
"blob_id": "e4767d8a4991a1180cc185c4c2d77104d63f9c7a",
"index": 6858,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n ap = argparse.ArgumentParser()\n ap.add_argument('-sd', '--startdate', help=\n 'Date to start scheduling trials, format is MM/DD.', required=True)\n ap.add_argument('-r', '--round', help='A number.', required=True)\n ap.add_argument('-hs', '--hsched', help=\n 'Which high schedule to use (e.g. H1, H2, H3)', required=True)\n ap.add_argument('-ls', '--lsched', help=\n 'Which low schedule to use (e.g. H1, H2, H3)', required=True)\n ap.add_argument('-h1', '--hfish1', help=\n '1st Fish that will be assigned H schedule', required=True)\n ap.add_argument('-h2', '--hfish2', help=\n '2nd Fish that will be assigned H schedule', required=True)\n ap.add_argument('-h3', '--hfish3', help=\n '3rd Fish that will be assigned H schedule', required=True)\n ap.add_argument('-l1', '--lfish1', help=\n '1st Fish that will be assigned L schedule', required=True)\n ap.add_argument('-l2', '--lfish2', help=\n '2nd Fish that will be assigned L schedule', required=True)\n ap.add_argument('-l3', '--lfish3', help=\n '3rd Fish that will be assigned L schedule', required=True)\n args = vars(ap.parse_args())\n a_dict = {'startDate': args['startdate'], 'round': args['round'],\n 'h_schedule': args['hsched'], 'l_schedule': args['lsched'],\n 'mapping': {'H': {'fish1': args['hfish1'], 'fish2': args['hfish2'],\n 'fish3': args['hfish3']}, 'L': {'fish1': args['lfish1'], 'fish2':\n args['lfish2'], 'fish3': args['lfish3']}}}\n os.remove('top.json')\n with open('top.json', 'w') as f:\n json.dump(a_dict, f, sort_keys=True, indent=4, separators=(',', ': '))\n sys.exit(0)\n",
"step-3": "import json\nimport argparse\nimport sys\nimport os\nif __name__ == '__main__':\n ap = argparse.ArgumentParser()\n ap.add_argument('-sd', '--startdate', help=\n 'Date to start scheduling trials, format is MM/DD.', required=True)\n ap.add_argument('-r', '--round', help='A number.', required=True)\n ap.add_argument('-hs', '--hsched', help=\n 'Which high schedule to use (e.g. H1, H2, H3)', required=True)\n ap.add_argument('-ls', '--lsched', help=\n 'Which low schedule to use (e.g. H1, H2, H3)', required=True)\n ap.add_argument('-h1', '--hfish1', help=\n '1st Fish that will be assigned H schedule', required=True)\n ap.add_argument('-h2', '--hfish2', help=\n '2nd Fish that will be assigned H schedule', required=True)\n ap.add_argument('-h3', '--hfish3', help=\n '3rd Fish that will be assigned H schedule', required=True)\n ap.add_argument('-l1', '--lfish1', help=\n '1st Fish that will be assigned L schedule', required=True)\n ap.add_argument('-l2', '--lfish2', help=\n '2nd Fish that will be assigned L schedule', required=True)\n ap.add_argument('-l3', '--lfish3', help=\n '3rd Fish that will be assigned L schedule', required=True)\n args = vars(ap.parse_args())\n a_dict = {'startDate': args['startdate'], 'round': args['round'],\n 'h_schedule': args['hsched'], 'l_schedule': args['lsched'],\n 'mapping': {'H': {'fish1': args['hfish1'], 'fish2': args['hfish2'],\n 'fish3': args['hfish3']}, 'L': {'fish1': args['lfish1'], 'fish2':\n args['lfish2'], 'fish3': args['lfish3']}}}\n os.remove('top.json')\n with open('top.json', 'w') as f:\n json.dump(a_dict, f, sort_keys=True, indent=4, separators=(',', ': '))\n sys.exit(0)\n",
"step-4": "import json\nimport argparse\nimport sys\nimport os\n\nif __name__ == '__main__':\n\n ap = argparse.ArgumentParser()\n ap.add_argument(\"-sd\",\"--startdate\", help=\"Date to start scheduling trials, format is MM/DD.\", required=True)\n ap.add_argument(\"-r\", \"--round\",help=\"A number.\", required=True)\n ap.add_argument(\"-hs\", \"--hsched\", help=\"Which high schedule to use (e.g. H1, H2, H3)\", required=True)\n ap.add_argument(\"-ls\", \"--lsched\", help=\"Which low schedule to use (e.g. H1, H2, H3)\", required=True)\n ap.add_argument(\"-h1\", \"--hfish1\", help=\"1st Fish that will be assigned H schedule\", required=True)\n ap.add_argument(\"-h2\", \"--hfish2\", help=\"2nd Fish that will be assigned H schedule\", required=True)\n ap.add_argument(\"-h3\", \"--hfish3\", help=\"3rd Fish that will be assigned H schedule\", required=True)\n ap.add_argument(\"-l1\", \"--lfish1\", help=\"1st Fish that will be assigned L schedule\", required=True)\n ap.add_argument(\"-l2\", \"--lfish2\", help=\"2nd Fish that will be assigned L schedule\", required=True)\n ap.add_argument(\"-l3\", \"--lfish3\", help=\"3rd Fish that will be assigned L schedule\", required=True)\n\n args = vars(ap.parse_args())\n\n a_dict = {\"startDate\": args[\"startdate\"], \"round\": args[\"round\"], \"h_schedule\": args[\"hsched\"], \"l_schedule\": args[\"lsched\"], \"mapping\": {\"H\": { \"fish1\" : args[\"hfish1\"], \"fish2\": args[\"hfish2\"], \"fish3\": args[\"hfish3\"]}, \"L\": { \"fish1\" : args[\"lfish1\"], \"fish2\": args[\"lfish2\"], \"fish3\": args[\"lfish3\"]}}}\n\n #print a_dict\n\n os.remove('top.json')\n\n with open('top.json', 'w') as f:\n json.dump(a_dict, f, sort_keys=True, indent=4, separators=(',', ': '))\n\n sys.exit(0)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def getIntersectionNode(self, headA, headB):
"""
:type head1, head1: ListNode
:rtype: ListNode
"""
if not headA or not headB:
return None
l1 = self.linkList_to_list(headA)
l2 = self.linkList_to_list(headB)
length = len(l1) if len(l1) < len(l2) else len(l2)
index = 0
for i in range(1, length + 1):
if l1[-i] == l2[-i]:
index = i
if not index < length + 1:
return None
return self.get_nth_node(headA, len(l1) - index + 1)
def linkList_to_list(self, head):
if not head:
return []
l = []
while head:
l.append(head.val)
head = head.next
return l
def get_nth_node(self, head, n):
try:
c = 1
while c < n:
head = head.next
c += 1
return head
except IndexError:
return None
|
normal
|
{
"blob_id": "66f60eb86137203a74656be13b631384eba30c84",
"index": 1681,
"step-1": "class Solution(object):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class Solution(object):\n\n def getIntersectionNode(self, headA, headB):\n \"\"\"\n :type head1, head1: ListNode\n :rtype: ListNode\n \"\"\"\n if not headA or not headB:\n return None\n l1 = self.linkList_to_list(headA)\n l2 = self.linkList_to_list(headB)\n length = len(l1) if len(l1) < len(l2) else len(l2)\n index = 0\n for i in range(1, length + 1):\n if l1[-i] == l2[-i]:\n index = i\n if not index < length + 1:\n return None\n return self.get_nth_node(headA, len(l1) - index + 1)\n <mask token>\n <mask token>\n",
"step-3": "class Solution(object):\n\n def getIntersectionNode(self, headA, headB):\n \"\"\"\n :type head1, head1: ListNode\n :rtype: ListNode\n \"\"\"\n if not headA or not headB:\n return None\n l1 = self.linkList_to_list(headA)\n l2 = self.linkList_to_list(headB)\n length = len(l1) if len(l1) < len(l2) else len(l2)\n index = 0\n for i in range(1, length + 1):\n if l1[-i] == l2[-i]:\n index = i\n if not index < length + 1:\n return None\n return self.get_nth_node(headA, len(l1) - index + 1)\n <mask token>\n\n def get_nth_node(self, head, n):\n try:\n c = 1\n while c < n:\n head = head.next\n c += 1\n return head\n except IndexError:\n return None\n",
"step-4": "class Solution(object):\n\n def getIntersectionNode(self, headA, headB):\n \"\"\"\n :type head1, head1: ListNode\n :rtype: ListNode\n \"\"\"\n if not headA or not headB:\n return None\n l1 = self.linkList_to_list(headA)\n l2 = self.linkList_to_list(headB)\n length = len(l1) if len(l1) < len(l2) else len(l2)\n index = 0\n for i in range(1, length + 1):\n if l1[-i] == l2[-i]:\n index = i\n if not index < length + 1:\n return None\n return self.get_nth_node(headA, len(l1) - index + 1)\n\n def linkList_to_list(self, head):\n if not head:\n return []\n l = []\n while head:\n l.append(head.val)\n head = head.next\n return l\n\n def get_nth_node(self, head, n):\n try:\n c = 1\n while c < n:\n head = head.next\n c += 1\n return head\n except IndexError:\n return None\n",
"step-5": "# Definition for singly-linked list.\n# class ListNode(object):\n# def __init__(self, x):\n# self.val = x\n# self.next = None\n\nclass Solution(object):\n def getIntersectionNode(self, headA, headB):\n \"\"\"\n :type head1, head1: ListNode\n :rtype: ListNode\n \"\"\"\n if not headA or not headB:\n return None\n l1 = self.linkList_to_list(headA)\n l2 = self.linkList_to_list(headB)\n length = len(l1) if len(l1) < len(l2) else len(l2)\n index = 0\n for i in range(1, length + 1):\n if l1[-i] == l2[-i]:\n index = i\n if not index < length + 1:\n return None\n return self.get_nth_node(headA, len(l1) - index + 1)\n\n def linkList_to_list(self, head):\n if not head:\n return []\n l = []\n while head:\n l.append(head.val)\n head = head.next\n return l\n\n def get_nth_node(self, head, n):\n try:\n c = 1\n while c < n:\n head = head.next\n c += 1\n return head\n except IndexError:\n return None\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gapit_test_framework import gapit_test, require, require_equal, require_true
from gapit_test_framework import require_not_equal, little_endian_bytes_to_int
from gapit_test_framework import GapitTest, get_read_offset_function
import gapit_test_framework
from vulkan_constants import *
@gapit_test("vkCmdCopyQueryPoolResults_test")
class AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):
def expect(self):
"""1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4 stride: 4 and dstOffset: 16."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 1))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(16, copy_query_pool_results.int_dstOffset)
require_equal(4, copy_query_pool_results.int_stride)
require_equal(0, copy_query_pool_results.int_flags)
@gapit_test("vkCmdCopyQueryPoolResults_test")
class FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest):
def expect(self):
"""2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,
queryCount: 4, stride: 8 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 2))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(4, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(8, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,
copy_query_pool_results.int_flags)
@gapit_test("vkCmdCopyQueryPoolResults_test")
class AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(GapitTest):
def expect(self):
"""3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,
queryCount: 4, stride: 12 and dstOffset: 0."""
copy_query_pool_results = require(self.nth_call_of(
"vkCmdCopyQueryPoolResults", 3))
require_not_equal(0, copy_query_pool_results.int_commandBuffer)
require_not_equal(0, copy_query_pool_results.int_queryPool)
require_equal(0, copy_query_pool_results.int_firstQuery)
require_equal(4, copy_query_pool_results.int_queryCount)
require_not_equal(0, copy_query_pool_results.int_dstBuffer)
require_equal(0, copy_query_pool_results.int_dstOffset)
require_equal(12, copy_query_pool_results.int_stride)
require_equal(VK_QUERY_RESULT_PARTIAL_BIT
| VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
copy_query_pool_results.int_flags)
|
normal
|
{
"blob_id": "c2f6fa4d9a6e2ee5f0593bef775ce8f811225613",
"index": 2047,
"step-1": "<mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n <mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-2": "<mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n <mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n\n def expect(self):\n \"\"\"2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. 
Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-3": "<mask token>\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n\n def expect(self):\n \"\"\"1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4 stride: 4 and dstOffset: 16.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 1))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(16, copy_query_pool_results.int_dstOffset)\n require_equal(4, copy_query_pool_results.int_stride)\n require_equal(0, copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n\n def expect(self):\n \"\"\"2. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. 
Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-4": "from gapit_test_framework import gapit_test, require, require_equal, require_true\nfrom gapit_test_framework import require_not_equal, little_endian_bytes_to_int\nfrom gapit_test_framework import GapitTest, get_read_offset_function\nimport gapit_test_framework\nfrom vulkan_constants import *\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n\n def expect(self):\n \"\"\"1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4 stride: 4 and dstOffset: 16.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 1))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(16, copy_query_pool_results.int_dstOffset)\n require_equal(4, copy_query_pool_results.int_stride)\n require_equal(0, copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest\n ):\n\n def expect(self):\n \"\"\"2. 
Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test('vkCmdCopyQueryPoolResults_test')\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(\n GapitTest):\n\n def expect(self):\n \"\"\"3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n 'vkCmdCopyQueryPoolResults', 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT |\n VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, copy_query_pool_results.\n int_flags)\n",
"step-5": "# Copyright 2017 Google Inc.\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# http://www.apache.org/licenses/LICENSE-2.0\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom gapit_test_framework import gapit_test, require, require_equal, require_true\nfrom gapit_test_framework import require_not_equal, little_endian_bytes_to_int\nfrom gapit_test_framework import GapitTest, get_read_offset_function\nimport gapit_test_framework\nfrom vulkan_constants import *\n\n\n@gapit_test(\"vkCmdCopyQueryPoolResults_test\")\nclass AllFourQueryResultsIn32BitWithNoFlagCopyWithOffsets(GapitTest):\n\n def expect(self):\n \"\"\"1. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4 stride: 4 and dstOffset: 16.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n \"vkCmdCopyQueryPoolResults\", 1))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(16, copy_query_pool_results.int_dstOffset)\n require_equal(4, copy_query_pool_results.int_stride)\n require_equal(0, copy_query_pool_results.int_flags)\n\n\n@gapit_test(\"vkCmdCopyQueryPoolResults_test\")\nclass FifthToEighthQueryResultsIn64BitWithWaitBitCopyWithZeroOffsets(GapitTest):\n\n def expect(self):\n \"\"\"2. 
Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 4,\n queryCount: 4, stride: 8 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n \"vkCmdCopyQueryPoolResults\", 2))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(4, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(8, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT,\n copy_query_pool_results.int_flags)\n\n\n@gapit_test(\"vkCmdCopyQueryPoolResults_test\")\nclass AllFourQueryResultsIn32BitAnd12StrideWithPartialAndAvailabilityBitWithZeroOffset(GapitTest):\n\n def expect(self):\n \"\"\"3. Expects vkCmdCopyQueryPoolResults() is called with firstQuery: 0,\n queryCount: 4, stride: 12 and dstOffset: 0.\"\"\"\n copy_query_pool_results = require(self.nth_call_of(\n \"vkCmdCopyQueryPoolResults\", 3))\n require_not_equal(0, copy_query_pool_results.int_commandBuffer)\n require_not_equal(0, copy_query_pool_results.int_queryPool)\n require_equal(0, copy_query_pool_results.int_firstQuery)\n require_equal(4, copy_query_pool_results.int_queryCount)\n require_not_equal(0, copy_query_pool_results.int_dstBuffer)\n require_equal(0, copy_query_pool_results.int_dstOffset)\n require_equal(12, copy_query_pool_results.int_stride)\n require_equal(VK_QUERY_RESULT_PARTIAL_BIT\n | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,\n copy_query_pool_results.int_flags)\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 4 13:04:32 2018
@author: andrew
"""
import os
import glob
import initialize
import psf
from astropy.io import fits
import filters
import numpy as np
import sys
import MR
from tqdm import tqdm
def sextractor_MR(location, MR_method='swarp', use_config_file=True):
'''
runs SExtractor on master residual
'''
check_MR = glob.glob("%s/residuals/MR.fits" % (location))
if check_MR == []:
print("-> Master residual does not exist, creating it first...")
if use_config_file == True:
MR_method = initialize.get_config_value('MR_method')
MR.MR(location, MR_method)
master_res = glob.glob("%s/residuals/MR.fits" % (location))
temp = glob.glob("%s/templates/*.fits" % (location))
if len(master_res) == 1:
if len(temp) == 1:
MR = master_res[0]
template = temp[0]
temp_name = template.split('/')[-1]
temp_name = temp_name[:-5]
MR_hdu = fits.open(MR)
MR_header = MR_hdu[0].header
saturate = MR_header['SATURATE']
temp_hdr = fits.getheader(template)
pixscale = temp_hdr['PIXSCALE']
MR_hdu.close()
FWHM = psf.fwhm_template(template)
config_loc = location + '/configs/default.sex'
with open(config_loc, 'r') as config:
data = config.readlines()
config.close()
data[9] = "PARAMETERS_NAME" + " " + location + "/configs/default.param" + "\n"
data[20] = "FILTER_NAME" + " " + location + "/configs/default.conv" + "\n"
with open(config_loc, 'w') as config:
config.writelines(data)
config.close()
print("\n-> SExtracting master residual...")
with open(config_loc, 'r') as config:
data = config.readlines()
config.close()
data[51] = "SATUR_LEVEL" + " " + str(saturate) + "\n"
data[62] = "SEEING_FWHM" + " " + str(FWHM) + "\n"
data[106] = "PSF_NAME" + " " + location + "/psf/" + temp_name + ".psf" + "\n"
data[58] = "PIXEL_SCALE" + " " + str(pixscale) + "\n"
data[32] = "WEIGHT_IMAGE" + " " + "%s[1]" % (MR) + "\n"
with open(config_loc, 'w') as config:
config.writelines(data)
config.close()
os.system("sextractor %s[0]> %s/sources/MR_sources.txt -c %s" % (MR, location, config_loc))
temp_hdu_data = fits.PrimaryHDU((fits.getdata(MR))*-1, header=fits.getheader(MR))
temp_hdu_mask = fits.ImageHDU(fits.getdata(MR, 1))
temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])
temp_hdu_list.writeto("%s/residuals/MR_neg.fits" % (location))
os.system("sextractor %s/residuals/MR_neg.fits[0]> %s/sources/MR_sources_2.txt -c %s" % (location, location, config_loc))
append_negative_sources(MR, MR=True)
MR_filter_sources(location)
else:
print("-> Error: Problem with number of template images\n-> Could not finish SExtracting master residual")
else:
print("-> Error: Problem with number of master residuals\n-> Could not finish SExtracting master residual")
def sextractor(location):
    """
    Run SExtractor on every residual image in <location>/residuals.

    For each ``*_residual_.fits`` file this:
      1. converts the mask extension into a weight map and attaches it as
         HDU 1 (done once per image, tracked by the WEIGHT header card),
      2. normalizes the residual once (tracked by the NORM header card),
      3. SExtracts both the residual and its negation (so negative flux is
         catalogued too), appending the negative detections,
      4. joins the per-image catalogs and applies the source filters.

    The shared config <location>/configs/default.sex is edited in place;
    the data[...] indices below are fixed line numbers in that file.
    """
    x = 0  # progress counter for the percentage printout
    sources = location + "/sources"
    residuals = location + "/residuals"
    check = os.path.exists(sources)
    check_temp = os.path.exists(sources + '/temp')
    length = len(residuals) + 1  # offset used to strip "<residuals>/" from paths
    if check == False:
        os.system("mkdir %s" % (sources))
        os.system("mkdir %s/temp" % (sources))
    else:
        if check_temp == False:
            os.system("mkdir %s/temp" % (sources))
    images = glob.glob(residuals + "/*_residual_.fits")
    initialize.create_configs(location)
    config_loc = location + '/configs/default.sex'
    # point the config at the shared .param/.conv files
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[9] = "PARAMETERS_NAME" + " " + location + "/configs/default.param" + "\n"
    data[20] = "FILTER_NAME" + " " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    print("-> Converting all residual masks into weight maps...\n")
    for r in tqdm(images):
        weight = weight_map(r)
        hdu = fits.open(r, mode='update')
        data = hdu[0].data
        hdr = hdu[0].header
        # Attach the weight map only once: WEIGHT card is 'N' (or absent)
        # until the image has been converted.
        try:
            if hdr['WEIGHT'] == 'N':
                hdr.set('WEIGHT','Y')
                hduData = fits.PrimaryHDU(data, header=hdr)
                hduWeight = fits.ImageHDU(weight)
                hduList = fits.HDUList([hduData, hduWeight])
                hduList.writeto(r, overwrite=True)
        except KeyError:
            # no WEIGHT card yet -> first pass over this image
            hdr.set('WEIGHT','Y')
            hduData = fits.PrimaryHDU(data, header=hdr)
            hduWeight = fits.ImageHDU(weight)
            hduList = fits.HDUList([hduData, hduWeight])
            hduList.writeto(r, overwrite=True)
        hdu.close()
        # Normalize the residual once, tracked by the NORM card.
        try:
            if fits.getval(r, 'NORM') == 'N':
                fits.setval(r, 'NORM', value='Y')
                MR.normalize(r)
        except KeyError:
            fits.setval(r, 'NORM', value='Y')
            MR.normalize(r)
    print("\n-> SExtracting residual images...")
    for i in images:
        name = i[length:-5]  # e.g. "<img>_residual_" (dir prefix and ".fits" stripped)
        data_name = location + '/data/' + name.replace('residual_','') + '.fits'
        FWHM = psf.fwhm(data_name)
        im_hdu = fits.open(data_name)
        im_header = im_hdu[0].header
        saturate = im_header['SATURATE']
        pixscale = im_header['PIXSCALE']
        im_hdu.close()
        # per-image config: saturation, seeing, PSF model, pixel scale, weight map
        with open(config_loc, 'r') as config:
            data = config.readlines()
            config.close()
        data[51] = "SATUR_LEVEL" + " " + str(saturate) + "\n"
        data[62] = "SEEING_FWHM" + " " + str(FWHM) + "\n"
        data[106] = "PSF_NAME" + " " + location + "/psf/" + name[:-9] + ".psf" + "\n"
        data[58] = "PIXEL_SCALE" + " " + str(pixscale) + "\n"
        data[32] = "WEIGHT_IMAGE" + " " + "%s[1]" % (i) + "\n"
        with open(config_loc, 'w') as config:
            config.writelines(data)
            config.close()
        os.system("sextractor %s[0]> %s/temp/%s.txt -c %s" % (i, sources, name, config_loc))
        # SExtract the negated residual as well; its detections are appended
        # to the positive catalog via append_negative_sources().
        temp_hdu_data = fits.PrimaryHDU((fits.getdata(i))*-1, header=fits.getheader(i))
        temp_hdu_mask = fits.ImageHDU(fits.getdata(i, 1))
        temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])
        temp_hdu_list.writeto("%s/residuals/temp.fits" % (location))
        os.system("sextractor %s/residuals/temp.fits[0]> %s/temp/%s_2.txt -c %s" % (location, sources, name, config_loc))
        append_negative_sources(i)
        os.remove("%s/residuals/temp.fits" % (location))
        x += 1
        per = float(x)/float(len(images)) * 100
        print("\t %.1f%% sextracted..." % (per))
    print("-> SExtracted %d images, catalogues placed in 'sources' directory\n" % (len(images)))
    print("-> Filtering source catalogs...\n")
    src_join(location)
    filter_sources(location)
def sextractor_sim(image):
    """
    Run SExtractor on a single fake (simulated) image and fold the
    detections into the source catalogs.

    The image is SExtracted twice -- once as-is and once negated, so that
    negative flux is catalogued too -- after pointing the shared config
    <location>/configs/default.sex at the image's PSF model (the data[...]
    indices are fixed line numbers in that file).  The temp catalogs are
    then joined and filtered.

    Parameters
    ----------
    image : str
        Path of the form <location>/<subdir>/<name>.fits; <location> is
        recovered from the path.
    """
    location = '/'.join(image.split('/')[:-2])
    sources = location + "/sources"
    # make sure sources/ and sources/temp/ exist
    if not os.path.exists(sources):
        os.system("mkdir %s" % (sources))
        os.system("mkdir %s/temp" % (sources))
    elif not os.path.exists(sources + '/temp'):
        os.system("mkdir %s/temp" % (sources))
    initialize.create_configs(location)
    config_loc = location + '/configs/default.sex'
    # point the config at the shared .param/.conv files
    with open(config_loc, 'r') as config:
        data = config.readlines()
    data[9] = "PARAMETERS_NAME" + " " + location + "/configs/default.param" + "\n"
    data[20] = "FILTER_NAME" + " " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
    print("\n-> SExtracting fake image...")
    name = image.split('/')[-1]
    with open(config_loc, 'r') as config:
        data = config.readlines()
    data[106] = "PSF_NAME" + " " + location + "/psf/" + name[:-5] + ".psf" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
    os.system("sextractor %s[0]> %s/temp/%s.txt -c %s" % (image, sources, name, config_loc))
    # build the negated image (mask extension carried over) and SExtract it
    temp_hdu_data = fits.PrimaryHDU((fits.getdata(image)) * -1, header=fits.getheader(image))
    temp_hdu_mask = fits.ImageHDU(fits.getdata(image, 1))
    temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])
    # BUG FIX: previously written to the literal path "%s/residuals/temp.fits"
    # (the % (location) substitution was missing), so the negated image never
    # landed where the sextractor call below expects it.
    temp_hdu_list.writeto("%s/residuals/temp.fits" % (location))
    # NOTE(review): this writes the negative catalog to %s/temp/%s.txt,
    # clobbering the positive catalog produced above; sextractor() instead
    # uses a _2.txt file plus append_negative_sources() -- confirm which
    # behavior is intended before changing it.
    os.system("sextractor %s/residuals/temp.fits[0]> %s/temp/%s.txt -c %s" % (location, sources, name, config_loc))
    os.remove("%s/residuals/temp.fits" % (location))
    src_join(location)
    filter_sources(location)
def sextractor_psf(location):
    """
    Build the SExtractor catalogs used by PSFEx for each science image
    (and the template) that does not yet have a ``.cat`` file in
    <location>/psf.

    The shared config <location>/configs/psf.sex is rewritten in place
    before every run; the data[...] indices below are fixed line numbers
    in that file.

    Returns
    -------
    list of str
        Paths of the images that were catalogued.

    Exits the program if there is not exactly one template image.
    """
    psf_loc = location + "/psf"
    data_loc = location + "/data"
    templates = location + "/templates"
    os.makedirs(psf_loc, exist_ok=True)
    temps = glob.glob(templates + "/*.fits")
    images = glob.glob(data_loc + "/*_A_.fits") + temps
    # skip images that already have a PSF catalog (match basename sans extension)
    cats = glob.glob(location + '/psf/*.cat')
    images_names = [(i.split('/')[-1])[:-5] for i in images]
    cats_names = [(c.split('/')[-1])[:-4] for c in cats]
    imageCats = [im for im in images_names if im not in cats_names]
    images = []
    if temps == []:
        temps.append('')  # placeholder so temps[0] below is always safe
    for imcats in imageCats:
        # map each remaining name back to its full path (template or data image)
        if imcats == (temps[0].split('/')[-1])[:-5]:
            images.append(temps[0])
        else:
            images.append(location + '/data/' + imcats + '.fits')
    initialize.create_configs(location)
    config_loc = location + '/configs/psf.sex'
    with open(config_loc, 'r') as config:
        data = config.readlines()
    data[9] = "PARAMETERS_NAME" + " " + location + "/configs/default.psfex" + "\n"
    # NOTE(review): this writes FILTER_NAME to line 19 while
    # sextractor_psf_sim() writes it to line 20 of the same file -- one of
    # the two indices is likely wrong; confirm against configs/psf.sex.
    data[19] = "FILTER_NAME" + " " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
    print("\n-> Creating PSF catalogs...")
    if len(temps) == 1:
        for x, i in enumerate(images, start=1):
            name = i.split('/')[-1][:-5]
            with fits.open(i) as hdu:
                pixscale = hdu[0].header['PIXSCALE']
            # per-image config: output catalog name and pixel scale
            with open(config_loc, 'r') as config:
                data = config.readlines()
            data[6] = "CATALOG_NAME" + " " + psf_loc + "/" + name + ".cat" + "\n"
            data[44] = "PIXEL_SCALE" + " " + str(pixscale) + "\n"
            with open(config_loc, 'w') as config:
                config.writelines(data)
            os.system("sextractor %s[0] -c %s" % (i, config_loc))
            per = float(x) / float(len(images)) * 100
            print("\t %.1f%% sextracted..." % (per))
        print("-> SExtracted %d images, catalogues placed in 'psf' directory\n" % (len(images)))
    else:
        print("\n-> Error: Problem with number of template images\n")
        sys.exit()
    return images
def sextractor_psf_sim(location, image):
    """
    Create the SExtractor catalog (for PSFEx) of a single fake/simulated
    image.

    Rewrites <location>/configs/psf.sex in place (the data[...] indices
    are fixed line numbers in that file) and runs SExtractor on the
    image's primary HDU; the catalog is written to
    <location>/psf/<image name>.cat.
    """
    psf_loc = location + "/psf"
    data_dir = location + "/data"
    length = len(data_dir) + 1  # offset used to strip "<data>/" from the image path
    os.makedirs(psf_loc, exist_ok=True)
    initialize.create_configs(location)
    config_loc = location + '/configs/psf.sex'
    with open(config_loc, 'r') as config:
        data = config.readlines()
    data[9] = "PARAMETERS_NAME" + " " + location + "/configs/default.psfex" + "\n"
    # NOTE(review): sextractor_psf() writes FILTER_NAME to line 19 of this
    # same file; one of the two indices is likely wrong -- confirm against
    # configs/psf.sex.
    data[20] = "FILTER_NAME" + " " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
    print("\n-> Creating PSF catalog of fake image...")
    name = image[length:-5]  # image path relative to <location>/data, ".fits" stripped
    with open(config_loc, 'r') as config:
        data = config.readlines()
    data[6] = "CATALOG_NAME" + " " + psf_loc + "/" + name + ".cat" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
    os.system("sextractor %s[0] -c %s" % (image, config_loc))
def weight_map(image):
    """
    Build a SExtractor weight map from a residual image's mask extension.

    Parameters
    ----------
    image : str
        Path to a FITS file whose extension 1 is a bad-pixel mask
        (non-zero == masked).

    Returns
    -------
    numpy.ndarray
        Float array the same shape as the mask: 1.0 where the mask is
        clear (good pixel), 0.0 where the mask is set.
    """
    with fits.open(image) as hdu:
        mask = hdu[1].data
    # Invert the mask: good pixels get weight 1, masked pixels weight 0.
    # (The original OR against an all-zero array was a no-op and is dropped.)
    return np.logical_not(mask).astype(float)
def src_join(location):
    """
    Merge the per-image catalogs in <location>/sources/temp into the
    running master catalog <location>/sources/sources.txt.

    Each temp file's contents are prefixed with its residual image's FITS
    name (so filter routines can attribute sources to images) and followed
    by a blank-line separator; files whose image is already listed (per
    filters.get_image_names) are skipped.  The temp files are removed as
    they are consumed, then the temp directory itself.
    """
    source_loc = location + '/sources'
    temp_source_loc = source_loc + '/temp'
    temp_source_files = glob.glob(temp_source_loc + '/*.txt')
    image_names = filters.get_image_names(location)
    for file in temp_source_files:
        with open(file, 'r') as fl:
            data = fl.readlines()
        # First line becomes the image name: strip the "<source_loc>/temp/"
        # prefix (len + 6 chars) and swap the .txt suffix for .fits.
        data = [str(file.replace('txt', 'fits')[len(source_loc) + 6:]) + '\n'] + data
        data.append("\n\n\n")
        with open(source_loc + '/sources.txt', 'a+') as s:
            if data[0] not in image_names:
                s.writelines(data)
        os.remove(file)
    try:
        os.rmdir(temp_source_loc)
    except OSError:
        # directory missing or unexpectedly non-empty; previously a bare
        # except that hid every error class
        print("-> Error: Problem removing temp directory in '/sources'")
def filter_sources(location, mask_sources=False):
    """
    Apply the standard source filters to every catalogued image.

    Runs the spread-model (PSF-likeness) filter over the whole data set,
    then the divot filter image by image, and -- when ``mask_sources`` is
    True -- masks the surviving sources out of each residual image.
    """
    print("\n-> Filtering out non PSF-like sources...")
    filters.spread_model_filter(location)
    print("-> Filtering out diveted detections...")
    for image in glob.glob(location + '/data/*_A_.fits'):
        filters.update_filtered_sources(location, filters.divot(image))
    if mask_sources:
        for residual in glob.glob("%s/residuals/*_residual_.fits" % (location)):
            filters.mask_sources_image(residual)
def MR_filter_sources(location):
    """
    Filter the master-residual catalog and fold it into the total source
    list.

    Prepends the "MR.fits" image tag to the master-residual catalog, runs
    the spread-model and divot filters in master-residual mode, and writes
    the combined source totals.
    """
    catalog = "%s/sources/MR_sources.txt" % (location)
    with open(catalog, 'r') as MR_src:
        MR_lines = MR_src.readlines()
    with open(catalog, 'w+') as MR_src:
        MR_src.writelines(["MR.fits\n"] + MR_lines)
    print("\n-> Filtering out non PSF-like sources in master residual...")
    filters.spread_model_filter(location, MR=True)
    print("-> Filtering out diveted detections in master residual...")
    MR_loc = "%s/residuals/MR.fits" % (location)
    filters.update_filtered_sources(location, filters.divot(MR_loc, MR=True), MR=True)
    filters.write_total_sources(location)
def _append_catalog(main_path, extra_path):
    """Append the non-comment lines of extra_path to main_path, then delete extra_path."""
    with open(extra_path, 'r') as neg_sources:
        lines = neg_sources.readlines()
    with open(main_path, 'a') as sources:
        for l in lines:
            if l[0] != '#':
                sources.write(l)
    os.remove(extra_path)


def append_negative_sources(residual, MR=False):
    """
    Merge a residual image's negative-detection catalog into its main
    catalog.

    The "_2" catalog produced by SExtracting the negated image has its
    comment header (lines starting with '#') stripped, its data lines
    appended to the main catalog, and is then deleted.

    Parameters
    ----------
    residual : str
        Path of the form <location>/residuals/<name>.fits; <location> is
        recovered from the path.
    MR : bool, optional
        True when the residual is the master residual, whose catalogs
        live directly in <location>/sources rather than sources/temp.
    """
    location = '/'.join(residual.split('/')[:-2])
    name = residual.split('/')[-1].replace('.fits', '')
    # The two cases were previously duplicated inline; they differ only in
    # where the catalog pair lives.
    if MR == True:
        _append_catalog("%s/sources/%s_sources.txt" % (location, name),
                        "%s/sources/%s_sources_2.txt" % (location, name))
    else:
        _append_catalog("%s/sources/temp/%s.txt" % (location, name),
                        "%s/sources/temp/%s_2.txt" % (location, name))
|
normal
|
{
"blob_id": "6f5eda426daf5db84dc205f36ec31e9076acb8ee",
"index": 8971,
"step-1": "<mask token>\n\n\ndef sextractor(location):\n \"\"\"\n runs SExtractor on all residual images\n \"\"\"\n x = 0\n sources = location + '/sources'\n residuals = location + '/residuals'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n length = len(residuals) + 1\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n images = glob.glob(residuals + '/*_residual_.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('-> Converting all residual masks into weight maps...\\n')\n for r in tqdm(images):\n weight = weight_map(r)\n hdu = fits.open(r, mode='update')\n data = hdu[0].data\n hdr = hdu[0].header\n try:\n if hdr['WEIGHT'] == 'N':\n hdr.set('WEIGHT', 'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n except KeyError:\n hdr.set('WEIGHT', 'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n hdu.close()\n try:\n if fits.getval(r, 'NORM') == 'N':\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n except KeyError:\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n print('\\n-> SExtracting residual images...')\n for i in images:\n name = i[length:-5]\n data_name = location + '/data/' + name.replace('residual_', ''\n ) + '.fits'\n FWHM = psf.fwhm(data_name)\n im_hdu = fits.open(data_name)\n im_header 
= im_hdu[0].header\n saturate = im_header['SATURATE']\n pixscale = im_header['PIXSCALE']\n im_hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = 'SATUR_LEVEL' + ' ' + str(saturate) + '\\n'\n data[62] = 'SEEING_FWHM' + ' ' + str(FWHM) + '\\n'\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-9\n ] + '.psf' + '\\n'\n data[58] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n data[32] = 'WEIGHT_IMAGE' + ' ' + '%s[1]' % i + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (i, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(i) * -1, header=fits.\n getheader(i))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(i, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits' % location)\n os.system(\n 'sextractor %s/residuals/temp.fits[0]> %s/temp/%s_2.txt -c %s' %\n (location, sources, name, config_loc))\n append_negative_sources(i)\n os.remove('%s/residuals/temp.fits' % location)\n x += 1\n per = float(x) / float(len(images)) * 100\n print('\\t %.1f%% sextracted...' 
% per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'sources' directory\\n\" %\n len(images))\n print('-> Filtering source catalogs...\\n')\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_sim(image):\n location = image.split('/')[:-2]\n location = '/'.join(location)\n sources = location + '/sources'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> SExtracting fake image...')\n name = image.split('/')[-1]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-5\n ] + '.psf' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (image, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(image) * -1, header=fits.\n getheader(image))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(image, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits')\n os.system('sextractor %s/residuals/temp.fits[0]> %s/temp/%s.txt -c %s' %\n (location, sources, name, config_loc))\n os.remove('%s/residuals/temp.fits' % location)\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_psf(location):\n x = 0\n psf_loc = location + '/psf'\n data = location + '/data'\n 
templates = location + '/templates'\n check = os.path.exists(psf_loc)\n if check == False:\n os.system('mkdir %s' % psf_loc)\n temps = glob.glob(templates + '/*.fits')\n images = glob.glob(data + '/*_A_.fits')\n for t in temps:\n images.append(t)\n cats = glob.glob(location + '/psf/*.cat')\n images_names = [i.split('/')[-1][:-5] for i in images]\n cats_names = [c.split('/')[-1][:-4] for c in cats]\n imageCats = [im for im in images_names if im not in cats_names]\n images = []\n if temps == []:\n temps.append('')\n for imcats in imageCats:\n if imcats == temps[0].split('/')[-1][:-5]:\n images.append(temps[0])\n else:\n images.append(location + '/data/' + imcats + '.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[19\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalogs...')\n if len(temps) == 1:\n for i in images:\n name = i.split('/')[-1][:-5]\n hdu = fits.open(i)\n hdr = hdu[0].header\n pixscale = hdr['PIXSCALE']\n hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6] = ('CATALOG_NAME' + ' ' + psf_loc + '/' + name +\n '.cat' + '\\n')\n data[44] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (i, config_loc))\n x += 1\n per = float(x) / float(len(images)) * 100\n print('\\t %.1f%% sextracted...' 
% per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'psf' directory\\n\" %\n len(images))\n else:\n print('\\n-> Error: Problem with number of template images\\n')\n sys.exit()\n return images\n\n\ndef sextractor_psf_sim(location, image):\n psf_loc = location + '/psf'\n data = location + '/data'\n check = os.path.exists(psf_loc)\n length = len(data) + 1\n if check == False:\n os.system('mkdir %s' % psf_loc)\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalog of fake image...')\n name = image[length:-5]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6\n ] = 'CATALOG_NAME' + ' ' + psf_loc + '/' + name + '.cat' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (image, config_loc))\n\n\ndef weight_map(image):\n hdu = fits.open(image)\n hduMask = hdu[1].data\n zeroMask = np.zeros(hduMask.shape)\n weightMap = np.logical_not(np.logical_or(hduMask, zeroMask)).astype(float)\n hdu.close()\n return weightMap\n\n\ndef src_join(location):\n source_loc = location + '/sources'\n temp_source_loc = source_loc + '/temp'\n temp_source_files = glob.glob(temp_source_loc + '/*.txt')\n image_names = filters.get_image_names(location)\n for file in temp_source_files:\n with open(file, 'r') as fl:\n data = fl.readlines()\n data = [str(file.replace('txt', 'fits')[len(source_loc) + 6:]) + '\\n'\n ] + data\n data.append('\\n\\n\\n')\n with open(source_loc + '/sources.txt', 'a+') as s:\n if data[0] not in image_names:\n s.writelines(data)\n os.remove(file)\n 
try:\n os.rmdir(temp_source_loc)\n except:\n print(\"-> Error: Problem removing temp directory in '/sources'\")\n\n\ndef filter_sources(location, mask_sources=False):\n print('\\n-> Filtering out non PSF-like sources...')\n filters.spread_model_filter(location)\n print('-> Filtering out diveted detections...')\n images = glob.glob(location + '/data/*_A_.fits')\n for i in images:\n indices = filters.divot(i)\n filters.update_filtered_sources(location, indices)\n residuals = glob.glob('%s/residuals/*_residual_.fits' % location)\n if mask_sources == True:\n for r in residuals:\n filters.mask_sources_image(r)\n\n\n<mask token>\n\n\ndef append_negative_sources(residual, MR=False):\n location = residual.split('/')[:-2]\n location = '/'.join(location)\n name = residual.split('/')[-1]\n name = name.replace('.fits', '')\n if MR == True:\n with open('%s/sources/%s_sources_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/%s_sources.txt' % (location, name), 'a'\n ) as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/%s_sources_2.txt' % (location, name))\n else:\n with open('%s/sources/temp/%s_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/temp/%s.txt' % (location, name), 'a') as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/temp/%s_2.txt' % (location, name))\n",
"step-2": "<mask token>\n\n\ndef sextractor_MR(location, MR_method='swarp', use_config_file=True):\n \"\"\"\n runs SExtractor on master residual\n \"\"\"\n check_MR = glob.glob('%s/residuals/MR.fits' % location)\n if check_MR == []:\n print('-> Master residual does not exist, creating it first...')\n if use_config_file == True:\n MR_method = initialize.get_config_value('MR_method')\n MR.MR(location, MR_method)\n master_res = glob.glob('%s/residuals/MR.fits' % location)\n temp = glob.glob('%s/templates/*.fits' % location)\n if len(master_res) == 1:\n if len(temp) == 1:\n MR = master_res[0]\n template = temp[0]\n temp_name = template.split('/')[-1]\n temp_name = temp_name[:-5]\n MR_hdu = fits.open(MR)\n MR_header = MR_hdu[0].header\n saturate = MR_header['SATURATE']\n temp_hdr = fits.getheader(template)\n pixscale = temp_hdr['PIXSCALE']\n MR_hdu.close()\n FWHM = psf.fwhm_template(template)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20] = ('FILTER_NAME' + ' ' + location +\n '/configs/default.conv' + '\\n')\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> SExtracting master residual...')\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = 'SATUR_LEVEL' + ' ' + str(saturate) + '\\n'\n data[62] = 'SEEING_FWHM' + ' ' + str(FWHM) + '\\n'\n data[106] = ('PSF_NAME' + ' ' + location + '/psf/' +\n temp_name + '.psf' + '\\n')\n data[58] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n data[32] = 'WEIGHT_IMAGE' + ' ' + '%s[1]' % MR + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/sources/MR_sources.txt -c %s' %\n (MR, location, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(MR) * -1, header=\n fits.getheader(MR))\n 
temp_hdu_mask = fits.ImageHDU(fits.getdata(MR, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/MR_neg.fits' % location)\n os.system(\n 'sextractor %s/residuals/MR_neg.fits[0]> %s/sources/MR_sources_2.txt -c %s'\n % (location, location, config_loc))\n append_negative_sources(MR, MR=True)\n MR_filter_sources(location)\n else:\n print(\n \"\"\"-> Error: Problem with number of template images\n-> Could not finish SExtracting master residual\"\"\"\n )\n else:\n print(\n \"\"\"-> Error: Problem with number of master residuals\n-> Could not finish SExtracting master residual\"\"\"\n )\n\n\ndef sextractor(location):\n \"\"\"\n runs SExtractor on all residual images\n \"\"\"\n x = 0\n sources = location + '/sources'\n residuals = location + '/residuals'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n length = len(residuals) + 1\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n images = glob.glob(residuals + '/*_residual_.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('-> Converting all residual masks into weight maps...\\n')\n for r in tqdm(images):\n weight = weight_map(r)\n hdu = fits.open(r, mode='update')\n data = hdu[0].data\n hdr = hdu[0].header\n try:\n if hdr['WEIGHT'] == 'N':\n hdr.set('WEIGHT', 'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n except 
KeyError:\n hdr.set('WEIGHT', 'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n hdu.close()\n try:\n if fits.getval(r, 'NORM') == 'N':\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n except KeyError:\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n print('\\n-> SExtracting residual images...')\n for i in images:\n name = i[length:-5]\n data_name = location + '/data/' + name.replace('residual_', ''\n ) + '.fits'\n FWHM = psf.fwhm(data_name)\n im_hdu = fits.open(data_name)\n im_header = im_hdu[0].header\n saturate = im_header['SATURATE']\n pixscale = im_header['PIXSCALE']\n im_hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = 'SATUR_LEVEL' + ' ' + str(saturate) + '\\n'\n data[62] = 'SEEING_FWHM' + ' ' + str(FWHM) + '\\n'\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-9\n ] + '.psf' + '\\n'\n data[58] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n data[32] = 'WEIGHT_IMAGE' + ' ' + '%s[1]' % i + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (i, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(i) * -1, header=fits.\n getheader(i))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(i, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits' % location)\n os.system(\n 'sextractor %s/residuals/temp.fits[0]> %s/temp/%s_2.txt -c %s' %\n (location, sources, name, config_loc))\n append_negative_sources(i)\n os.remove('%s/residuals/temp.fits' % location)\n x += 1\n per = float(x) / float(len(images)) * 100\n print('\\t %.1f%% sextracted...' 
% per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'sources' directory\\n\" %\n len(images))\n print('-> Filtering source catalogs...\\n')\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_sim(image):\n location = image.split('/')[:-2]\n location = '/'.join(location)\n sources = location + '/sources'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> SExtracting fake image...')\n name = image.split('/')[-1]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-5\n ] + '.psf' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (image, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(image) * -1, header=fits.\n getheader(image))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(image, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits')\n os.system('sextractor %s/residuals/temp.fits[0]> %s/temp/%s.txt -c %s' %\n (location, sources, name, config_loc))\n os.remove('%s/residuals/temp.fits' % location)\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_psf(location):\n x = 0\n psf_loc = location + '/psf'\n data = location + '/data'\n 
templates = location + '/templates'\n check = os.path.exists(psf_loc)\n if check == False:\n os.system('mkdir %s' % psf_loc)\n temps = glob.glob(templates + '/*.fits')\n images = glob.glob(data + '/*_A_.fits')\n for t in temps:\n images.append(t)\n cats = glob.glob(location + '/psf/*.cat')\n images_names = [i.split('/')[-1][:-5] for i in images]\n cats_names = [c.split('/')[-1][:-4] for c in cats]\n imageCats = [im for im in images_names if im not in cats_names]\n images = []\n if temps == []:\n temps.append('')\n for imcats in imageCats:\n if imcats == temps[0].split('/')[-1][:-5]:\n images.append(temps[0])\n else:\n images.append(location + '/data/' + imcats + '.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[19\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalogs...')\n if len(temps) == 1:\n for i in images:\n name = i.split('/')[-1][:-5]\n hdu = fits.open(i)\n hdr = hdu[0].header\n pixscale = hdr['PIXSCALE']\n hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6] = ('CATALOG_NAME' + ' ' + psf_loc + '/' + name +\n '.cat' + '\\n')\n data[44] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (i, config_loc))\n x += 1\n per = float(x) / float(len(images)) * 100\n print('\\t %.1f%% sextracted...' 
% per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'psf' directory\\n\" %\n len(images))\n else:\n print('\\n-> Error: Problem with number of template images\\n')\n sys.exit()\n return images\n\n\ndef sextractor_psf_sim(location, image):\n psf_loc = location + '/psf'\n data = location + '/data'\n check = os.path.exists(psf_loc)\n length = len(data) + 1\n if check == False:\n os.system('mkdir %s' % psf_loc)\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalog of fake image...')\n name = image[length:-5]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6\n ] = 'CATALOG_NAME' + ' ' + psf_loc + '/' + name + '.cat' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (image, config_loc))\n\n\ndef weight_map(image):\n hdu = fits.open(image)\n hduMask = hdu[1].data\n zeroMask = np.zeros(hduMask.shape)\n weightMap = np.logical_not(np.logical_or(hduMask, zeroMask)).astype(float)\n hdu.close()\n return weightMap\n\n\ndef src_join(location):\n source_loc = location + '/sources'\n temp_source_loc = source_loc + '/temp'\n temp_source_files = glob.glob(temp_source_loc + '/*.txt')\n image_names = filters.get_image_names(location)\n for file in temp_source_files:\n with open(file, 'r') as fl:\n data = fl.readlines()\n data = [str(file.replace('txt', 'fits')[len(source_loc) + 6:]) + '\\n'\n ] + data\n data.append('\\n\\n\\n')\n with open(source_loc + '/sources.txt', 'a+') as s:\n if data[0] not in image_names:\n s.writelines(data)\n os.remove(file)\n 
try:\n os.rmdir(temp_source_loc)\n except:\n print(\"-> Error: Problem removing temp directory in '/sources'\")\n\n\ndef filter_sources(location, mask_sources=False):\n print('\\n-> Filtering out non PSF-like sources...')\n filters.spread_model_filter(location)\n print('-> Filtering out diveted detections...')\n images = glob.glob(location + '/data/*_A_.fits')\n for i in images:\n indices = filters.divot(i)\n filters.update_filtered_sources(location, indices)\n residuals = glob.glob('%s/residuals/*_residual_.fits' % location)\n if mask_sources == True:\n for r in residuals:\n filters.mask_sources_image(r)\n\n\n<mask token>\n\n\ndef append_negative_sources(residual, MR=False):\n location = residual.split('/')[:-2]\n location = '/'.join(location)\n name = residual.split('/')[-1]\n name = name.replace('.fits', '')\n if MR == True:\n with open('%s/sources/%s_sources_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/%s_sources.txt' % (location, name), 'a'\n ) as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/%s_sources_2.txt' % (location, name))\n else:\n with open('%s/sources/temp/%s_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/temp/%s.txt' % (location, name), 'a') as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/temp/%s_2.txt' % (location, name))\n",
"step-3": "<mask token>\n\n\ndef sextractor_MR(location, MR_method='swarp', use_config_file=True):\n \"\"\"\n runs SExtractor on master residual\n \"\"\"\n check_MR = glob.glob('%s/residuals/MR.fits' % location)\n if check_MR == []:\n print('-> Master residual does not exist, creating it first...')\n if use_config_file == True:\n MR_method = initialize.get_config_value('MR_method')\n MR.MR(location, MR_method)\n master_res = glob.glob('%s/residuals/MR.fits' % location)\n temp = glob.glob('%s/templates/*.fits' % location)\n if len(master_res) == 1:\n if len(temp) == 1:\n MR = master_res[0]\n template = temp[0]\n temp_name = template.split('/')[-1]\n temp_name = temp_name[:-5]\n MR_hdu = fits.open(MR)\n MR_header = MR_hdu[0].header\n saturate = MR_header['SATURATE']\n temp_hdr = fits.getheader(template)\n pixscale = temp_hdr['PIXSCALE']\n MR_hdu.close()\n FWHM = psf.fwhm_template(template)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20] = ('FILTER_NAME' + ' ' + location +\n '/configs/default.conv' + '\\n')\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> SExtracting master residual...')\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = 'SATUR_LEVEL' + ' ' + str(saturate) + '\\n'\n data[62] = 'SEEING_FWHM' + ' ' + str(FWHM) + '\\n'\n data[106] = ('PSF_NAME' + ' ' + location + '/psf/' +\n temp_name + '.psf' + '\\n')\n data[58] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n data[32] = 'WEIGHT_IMAGE' + ' ' + '%s[1]' % MR + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/sources/MR_sources.txt -c %s' %\n (MR, location, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(MR) * -1, header=\n fits.getheader(MR))\n 
temp_hdu_mask = fits.ImageHDU(fits.getdata(MR, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/MR_neg.fits' % location)\n os.system(\n 'sextractor %s/residuals/MR_neg.fits[0]> %s/sources/MR_sources_2.txt -c %s'\n % (location, location, config_loc))\n append_negative_sources(MR, MR=True)\n MR_filter_sources(location)\n else:\n print(\n \"\"\"-> Error: Problem with number of template images\n-> Could not finish SExtracting master residual\"\"\"\n )\n else:\n print(\n \"\"\"-> Error: Problem with number of master residuals\n-> Could not finish SExtracting master residual\"\"\"\n )\n\n\ndef sextractor(location):\n \"\"\"\n runs SExtractor on all residual images\n \"\"\"\n x = 0\n sources = location + '/sources'\n residuals = location + '/residuals'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n length = len(residuals) + 1\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n images = glob.glob(residuals + '/*_residual_.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('-> Converting all residual masks into weight maps...\\n')\n for r in tqdm(images):\n weight = weight_map(r)\n hdu = fits.open(r, mode='update')\n data = hdu[0].data\n hdr = hdu[0].header\n try:\n if hdr['WEIGHT'] == 'N':\n hdr.set('WEIGHT', 'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n except 
KeyError:\n hdr.set('WEIGHT', 'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n hdu.close()\n try:\n if fits.getval(r, 'NORM') == 'N':\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n except KeyError:\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n print('\\n-> SExtracting residual images...')\n for i in images:\n name = i[length:-5]\n data_name = location + '/data/' + name.replace('residual_', ''\n ) + '.fits'\n FWHM = psf.fwhm(data_name)\n im_hdu = fits.open(data_name)\n im_header = im_hdu[0].header\n saturate = im_header['SATURATE']\n pixscale = im_header['PIXSCALE']\n im_hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = 'SATUR_LEVEL' + ' ' + str(saturate) + '\\n'\n data[62] = 'SEEING_FWHM' + ' ' + str(FWHM) + '\\n'\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-9\n ] + '.psf' + '\\n'\n data[58] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n data[32] = 'WEIGHT_IMAGE' + ' ' + '%s[1]' % i + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (i, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(i) * -1, header=fits.\n getheader(i))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(i, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits' % location)\n os.system(\n 'sextractor %s/residuals/temp.fits[0]> %s/temp/%s_2.txt -c %s' %\n (location, sources, name, config_loc))\n append_negative_sources(i)\n os.remove('%s/residuals/temp.fits' % location)\n x += 1\n per = float(x) / float(len(images)) * 100\n print('\\t %.1f%% sextracted...' 
% per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'sources' directory\\n\" %\n len(images))\n print('-> Filtering source catalogs...\\n')\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_sim(image):\n location = image.split('/')[:-2]\n location = '/'.join(location)\n sources = location + '/sources'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> SExtracting fake image...')\n name = image.split('/')[-1]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-5\n ] + '.psf' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (image, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(image) * -1, header=fits.\n getheader(image))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(image, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits')\n os.system('sextractor %s/residuals/temp.fits[0]> %s/temp/%s.txt -c %s' %\n (location, sources, name, config_loc))\n os.remove('%s/residuals/temp.fits' % location)\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_psf(location):\n x = 0\n psf_loc = location + '/psf'\n data = location + '/data'\n 
templates = location + '/templates'\n check = os.path.exists(psf_loc)\n if check == False:\n os.system('mkdir %s' % psf_loc)\n temps = glob.glob(templates + '/*.fits')\n images = glob.glob(data + '/*_A_.fits')\n for t in temps:\n images.append(t)\n cats = glob.glob(location + '/psf/*.cat')\n images_names = [i.split('/')[-1][:-5] for i in images]\n cats_names = [c.split('/')[-1][:-4] for c in cats]\n imageCats = [im for im in images_names if im not in cats_names]\n images = []\n if temps == []:\n temps.append('')\n for imcats in imageCats:\n if imcats == temps[0].split('/')[-1][:-5]:\n images.append(temps[0])\n else:\n images.append(location + '/data/' + imcats + '.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[19\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalogs...')\n if len(temps) == 1:\n for i in images:\n name = i.split('/')[-1][:-5]\n hdu = fits.open(i)\n hdr = hdu[0].header\n pixscale = hdr['PIXSCALE']\n hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6] = ('CATALOG_NAME' + ' ' + psf_loc + '/' + name +\n '.cat' + '\\n')\n data[44] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (i, config_loc))\n x += 1\n per = float(x) / float(len(images)) * 100\n print('\\t %.1f%% sextracted...' 
% per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'psf' directory\\n\" %\n len(images))\n else:\n print('\\n-> Error: Problem with number of template images\\n')\n sys.exit()\n return images\n\n\ndef sextractor_psf_sim(location, image):\n psf_loc = location + '/psf'\n data = location + '/data'\n check = os.path.exists(psf_loc)\n length = len(data) + 1\n if check == False:\n os.system('mkdir %s' % psf_loc)\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalog of fake image...')\n name = image[length:-5]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6\n ] = 'CATALOG_NAME' + ' ' + psf_loc + '/' + name + '.cat' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (image, config_loc))\n\n\ndef weight_map(image):\n hdu = fits.open(image)\n hduMask = hdu[1].data\n zeroMask = np.zeros(hduMask.shape)\n weightMap = np.logical_not(np.logical_or(hduMask, zeroMask)).astype(float)\n hdu.close()\n return weightMap\n\n\ndef src_join(location):\n source_loc = location + '/sources'\n temp_source_loc = source_loc + '/temp'\n temp_source_files = glob.glob(temp_source_loc + '/*.txt')\n image_names = filters.get_image_names(location)\n for file in temp_source_files:\n with open(file, 'r') as fl:\n data = fl.readlines()\n data = [str(file.replace('txt', 'fits')[len(source_loc) + 6:]) + '\\n'\n ] + data\n data.append('\\n\\n\\n')\n with open(source_loc + '/sources.txt', 'a+') as s:\n if data[0] not in image_names:\n s.writelines(data)\n os.remove(file)\n 
try:\n os.rmdir(temp_source_loc)\n except:\n print(\"-> Error: Problem removing temp directory in '/sources'\")\n\n\ndef filter_sources(location, mask_sources=False):\n print('\\n-> Filtering out non PSF-like sources...')\n filters.spread_model_filter(location)\n print('-> Filtering out diveted detections...')\n images = glob.glob(location + '/data/*_A_.fits')\n for i in images:\n indices = filters.divot(i)\n filters.update_filtered_sources(location, indices)\n residuals = glob.glob('%s/residuals/*_residual_.fits' % location)\n if mask_sources == True:\n for r in residuals:\n filters.mask_sources_image(r)\n\n\ndef MR_filter_sources(location):\n with open('%s/sources/MR_sources.txt' % location, 'r') as MR_src:\n MR_lines = MR_src.readlines()\n MR_lines.insert(0, 'MR.fits\\n')\n with open('%s/sources/MR_sources.txt' % location, 'w+') as MR_src:\n for line in MR_lines:\n MR_src.write(line)\n MR_loc = '%s/residuals/MR.fits' % location\n print('\\n-> Filtering out non PSF-like sources in master residual...')\n filters.spread_model_filter(location, MR=True)\n print('-> Filtering out diveted detections in master residual...')\n indices = filters.divot(MR_loc, MR=True)\n filters.update_filtered_sources(location, indices, MR=True)\n filters.write_total_sources(location)\n\n\ndef append_negative_sources(residual, MR=False):\n location = residual.split('/')[:-2]\n location = '/'.join(location)\n name = residual.split('/')[-1]\n name = name.replace('.fits', '')\n if MR == True:\n with open('%s/sources/%s_sources_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/%s_sources.txt' % (location, name), 'a'\n ) as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/%s_sources_2.txt' % (location, name))\n else:\n with open('%s/sources/temp/%s_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/temp/%s.txt' % (location, name), 'a') as 
sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/temp/%s_2.txt' % (location, name))\n",
"step-4": "<mask token>\nimport os\nimport glob\nimport initialize\nimport psf\nfrom astropy.io import fits\nimport filters\nimport numpy as np\nimport sys\nimport MR\nfrom tqdm import tqdm\n\n\ndef sextractor_MR(location, MR_method='swarp', use_config_file=True):\n \"\"\"\n runs SExtractor on master residual\n \"\"\"\n check_MR = glob.glob('%s/residuals/MR.fits' % location)\n if check_MR == []:\n print('-> Master residual does not exist, creating it first...')\n if use_config_file == True:\n MR_method = initialize.get_config_value('MR_method')\n MR.MR(location, MR_method)\n master_res = glob.glob('%s/residuals/MR.fits' % location)\n temp = glob.glob('%s/templates/*.fits' % location)\n if len(master_res) == 1:\n if len(temp) == 1:\n MR = master_res[0]\n template = temp[0]\n temp_name = template.split('/')[-1]\n temp_name = temp_name[:-5]\n MR_hdu = fits.open(MR)\n MR_header = MR_hdu[0].header\n saturate = MR_header['SATURATE']\n temp_hdr = fits.getheader(template)\n pixscale = temp_hdr['PIXSCALE']\n MR_hdu.close()\n FWHM = psf.fwhm_template(template)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20] = ('FILTER_NAME' + ' ' + location +\n '/configs/default.conv' + '\\n')\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> SExtracting master residual...')\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = 'SATUR_LEVEL' + ' ' + str(saturate) + '\\n'\n data[62] = 'SEEING_FWHM' + ' ' + str(FWHM) + '\\n'\n data[106] = ('PSF_NAME' + ' ' + location + '/psf/' +\n temp_name + '.psf' + '\\n')\n data[58] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n data[32] = 'WEIGHT_IMAGE' + ' ' + '%s[1]' % MR + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n 
os.system('sextractor %s[0]> %s/sources/MR_sources.txt -c %s' %\n (MR, location, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(MR) * -1, header=\n fits.getheader(MR))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(MR, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/MR_neg.fits' % location)\n os.system(\n 'sextractor %s/residuals/MR_neg.fits[0]> %s/sources/MR_sources_2.txt -c %s'\n % (location, location, config_loc))\n append_negative_sources(MR, MR=True)\n MR_filter_sources(location)\n else:\n print(\n \"\"\"-> Error: Problem with number of template images\n-> Could not finish SExtracting master residual\"\"\"\n )\n else:\n print(\n \"\"\"-> Error: Problem with number of master residuals\n-> Could not finish SExtracting master residual\"\"\"\n )\n\n\ndef sextractor(location):\n \"\"\"\n runs SExtractor on all residual images\n \"\"\"\n x = 0\n sources = location + '/sources'\n residuals = location + '/residuals'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n length = len(residuals) + 1\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n images = glob.glob(residuals + '/*_residual_.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('-> Converting all residual masks into weight maps...\\n')\n for r in tqdm(images):\n weight = weight_map(r)\n hdu = fits.open(r, mode='update')\n data = hdu[0].data\n hdr = hdu[0].header\n try:\n if hdr['WEIGHT'] == 'N':\n hdr.set('WEIGHT', 
'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n except KeyError:\n hdr.set('WEIGHT', 'Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n hdu.close()\n try:\n if fits.getval(r, 'NORM') == 'N':\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n except KeyError:\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n print('\\n-> SExtracting residual images...')\n for i in images:\n name = i[length:-5]\n data_name = location + '/data/' + name.replace('residual_', ''\n ) + '.fits'\n FWHM = psf.fwhm(data_name)\n im_hdu = fits.open(data_name)\n im_header = im_hdu[0].header\n saturate = im_header['SATURATE']\n pixscale = im_header['PIXSCALE']\n im_hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = 'SATUR_LEVEL' + ' ' + str(saturate) + '\\n'\n data[62] = 'SEEING_FWHM' + ' ' + str(FWHM) + '\\n'\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-9\n ] + '.psf' + '\\n'\n data[58] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n data[32] = 'WEIGHT_IMAGE' + ' ' + '%s[1]' % i + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (i, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(i) * -1, header=fits.\n getheader(i))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(i, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits' % location)\n os.system(\n 'sextractor %s/residuals/temp.fits[0]> %s/temp/%s_2.txt -c %s' %\n (location, sources, name, config_loc))\n append_negative_sources(i)\n os.remove('%s/residuals/temp.fits' % location)\n x += 1\n per = float(x) / float(len(images)) * 100\n 
print('\\t %.1f%% sextracted...' % per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'sources' directory\\n\" %\n len(images))\n print('-> Filtering source catalogs...\\n')\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_sim(image):\n location = image.split('/')[:-2]\n location = '/'.join(location)\n sources = location + '/sources'\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n if check == False:\n os.system('mkdir %s' % sources)\n os.system('mkdir %s/temp' % sources)\n elif check_temp == False:\n os.system('mkdir %s/temp' % sources)\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.param' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> SExtracting fake image...')\n name = image.split('/')[-1]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[106] = 'PSF_NAME' + ' ' + location + '/psf/' + name[:-5\n ] + '.psf' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0]> %s/temp/%s.txt -c %s' % (image, sources,\n name, config_loc))\n temp_hdu_data = fits.PrimaryHDU(fits.getdata(image) * -1, header=fits.\n getheader(image))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(image, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto('%s/residuals/temp.fits')\n os.system('sextractor %s/residuals/temp.fits[0]> %s/temp/%s.txt -c %s' %\n (location, sources, name, config_loc))\n os.remove('%s/residuals/temp.fits' % location)\n src_join(location)\n filter_sources(location)\n\n\ndef sextractor_psf(location):\n x = 0\n psf_loc = location + 
'/psf'\n data = location + '/data'\n templates = location + '/templates'\n check = os.path.exists(psf_loc)\n if check == False:\n os.system('mkdir %s' % psf_loc)\n temps = glob.glob(templates + '/*.fits')\n images = glob.glob(data + '/*_A_.fits')\n for t in temps:\n images.append(t)\n cats = glob.glob(location + '/psf/*.cat')\n images_names = [i.split('/')[-1][:-5] for i in images]\n cats_names = [c.split('/')[-1][:-4] for c in cats]\n imageCats = [im for im in images_names if im not in cats_names]\n images = []\n if temps == []:\n temps.append('')\n for imcats in imageCats:\n if imcats == temps[0].split('/')[-1][:-5]:\n images.append(temps[0])\n else:\n images.append(location + '/data/' + imcats + '.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[19\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalogs...')\n if len(temps) == 1:\n for i in images:\n name = i.split('/')[-1][:-5]\n hdu = fits.open(i)\n hdr = hdu[0].header\n pixscale = hdr['PIXSCALE']\n hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6] = ('CATALOG_NAME' + ' ' + psf_loc + '/' + name +\n '.cat' + '\\n')\n data[44] = 'PIXEL_SCALE' + ' ' + str(pixscale) + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (i, config_loc))\n x += 1\n per = float(x) / float(len(images)) * 100\n print('\\t %.1f%% sextracted...' 
% per)\n print(\n \"-> SExtracted %d images, catalogues placed in 'psf' directory\\n\" %\n len(images))\n else:\n print('\\n-> Error: Problem with number of template images\\n')\n sys.exit()\n return images\n\n\ndef sextractor_psf_sim(location, image):\n psf_loc = location + '/psf'\n data = location + '/data'\n check = os.path.exists(psf_loc)\n length = len(data) + 1\n if check == False:\n os.system('mkdir %s' % psf_loc)\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = ('PARAMETERS_NAME' + ' ' + location +\n '/configs/default.psfex' + '\\n')\n data[20\n ] = 'FILTER_NAME' + ' ' + location + '/configs/default.conv' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print('\\n-> Creating PSF catalog of fake image...')\n name = image[length:-5]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6\n ] = 'CATALOG_NAME' + ' ' + psf_loc + '/' + name + '.cat' + '\\n'\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system('sextractor %s[0] -c %s' % (image, config_loc))\n\n\ndef weight_map(image):\n hdu = fits.open(image)\n hduMask = hdu[1].data\n zeroMask = np.zeros(hduMask.shape)\n weightMap = np.logical_not(np.logical_or(hduMask, zeroMask)).astype(float)\n hdu.close()\n return weightMap\n\n\ndef src_join(location):\n source_loc = location + '/sources'\n temp_source_loc = source_loc + '/temp'\n temp_source_files = glob.glob(temp_source_loc + '/*.txt')\n image_names = filters.get_image_names(location)\n for file in temp_source_files:\n with open(file, 'r') as fl:\n data = fl.readlines()\n data = [str(file.replace('txt', 'fits')[len(source_loc) + 6:]) + '\\n'\n ] + data\n data.append('\\n\\n\\n')\n with open(source_loc + '/sources.txt', 'a+') as s:\n if data[0] not in image_names:\n s.writelines(data)\n os.remove(file)\n 
try:\n os.rmdir(temp_source_loc)\n except:\n print(\"-> Error: Problem removing temp directory in '/sources'\")\n\n\ndef filter_sources(location, mask_sources=False):\n print('\\n-> Filtering out non PSF-like sources...')\n filters.spread_model_filter(location)\n print('-> Filtering out diveted detections...')\n images = glob.glob(location + '/data/*_A_.fits')\n for i in images:\n indices = filters.divot(i)\n filters.update_filtered_sources(location, indices)\n residuals = glob.glob('%s/residuals/*_residual_.fits' % location)\n if mask_sources == True:\n for r in residuals:\n filters.mask_sources_image(r)\n\n\ndef MR_filter_sources(location):\n with open('%s/sources/MR_sources.txt' % location, 'r') as MR_src:\n MR_lines = MR_src.readlines()\n MR_lines.insert(0, 'MR.fits\\n')\n with open('%s/sources/MR_sources.txt' % location, 'w+') as MR_src:\n for line in MR_lines:\n MR_src.write(line)\n MR_loc = '%s/residuals/MR.fits' % location\n print('\\n-> Filtering out non PSF-like sources in master residual...')\n filters.spread_model_filter(location, MR=True)\n print('-> Filtering out diveted detections in master residual...')\n indices = filters.divot(MR_loc, MR=True)\n filters.update_filtered_sources(location, indices, MR=True)\n filters.write_total_sources(location)\n\n\ndef append_negative_sources(residual, MR=False):\n location = residual.split('/')[:-2]\n location = '/'.join(location)\n name = residual.split('/')[-1]\n name = name.replace('.fits', '')\n if MR == True:\n with open('%s/sources/%s_sources_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/%s_sources.txt' % (location, name), 'a'\n ) as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/%s_sources_2.txt' % (location, name))\n else:\n with open('%s/sources/temp/%s_2.txt' % (location, name), 'r'\n ) as neg_sources:\n lines = neg_sources.readlines()\n with open('%s/sources/temp/%s.txt' % (location, name), 'a') as 
sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove('%s/sources/temp/%s_2.txt' % (location, name))\n",
"step-5": "#!/usr/bin/env python2\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jun 4 13:04:32 2018\n\n@author: andrew\n\"\"\"\n\nimport os\nimport glob\nimport initialize\nimport psf\nfrom astropy.io import fits\nimport filters\nimport numpy as np\nimport sys\nimport MR\nfrom tqdm import tqdm\n\ndef sextractor_MR(location, MR_method='swarp', use_config_file=True):\n '''\n runs SExtractor on master residual\n '''\n check_MR = glob.glob(\"%s/residuals/MR.fits\" % (location))\n if check_MR == []:\n print(\"-> Master residual does not exist, creating it first...\")\n if use_config_file == True:\n MR_method = initialize.get_config_value('MR_method')\n MR.MR(location, MR_method)\n master_res = glob.glob(\"%s/residuals/MR.fits\" % (location))\n temp = glob.glob(\"%s/templates/*.fits\" % (location))\n if len(master_res) == 1:\n if len(temp) == 1:\n MR = master_res[0]\n template = temp[0]\n temp_name = template.split('/')[-1]\n temp_name = temp_name[:-5]\n MR_hdu = fits.open(MR)\n MR_header = MR_hdu[0].header\n saturate = MR_header['SATURATE']\n temp_hdr = fits.getheader(template)\n pixscale = temp_hdr['PIXSCALE']\n MR_hdu.close()\n FWHM = psf.fwhm_template(template)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = \"PARAMETERS_NAME\" + \" \" + location + \"/configs/default.param\" + \"\\n\"\n data[20] = \"FILTER_NAME\" + \" \" + location + \"/configs/default.conv\" + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print(\"\\n-> SExtracting master residual...\")\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = \"SATUR_LEVEL\" + \" \" + str(saturate) + \"\\n\"\n data[62] = \"SEEING_FWHM\" + \" \" + str(FWHM) + \"\\n\"\n data[106] = \"PSF_NAME\" + \" \" + location + \"/psf/\" + temp_name + \".psf\" + \"\\n\"\n data[58] = \"PIXEL_SCALE\" + \" \" + str(pixscale) + \"\\n\"\n data[32] = 
\"WEIGHT_IMAGE\" + \" \" + \"%s[1]\" % (MR) + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system(\"sextractor %s[0]> %s/sources/MR_sources.txt -c %s\" % (MR, location, config_loc))\n temp_hdu_data = fits.PrimaryHDU((fits.getdata(MR))*-1, header=fits.getheader(MR))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(MR, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto(\"%s/residuals/MR_neg.fits\" % (location))\n os.system(\"sextractor %s/residuals/MR_neg.fits[0]> %s/sources/MR_sources_2.txt -c %s\" % (location, location, config_loc))\n append_negative_sources(MR, MR=True)\n MR_filter_sources(location)\n else:\n print(\"-> Error: Problem with number of template images\\n-> Could not finish SExtracting master residual\")\n else:\n print(\"-> Error: Problem with number of master residuals\\n-> Could not finish SExtracting master residual\")\n\ndef sextractor(location):\n '''\n runs SExtractor on all residual images\n '''\n x = 0\n sources = location + \"/sources\"\n residuals = location + \"/residuals\"\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n length = len(residuals) + 1\n if check == False:\n os.system(\"mkdir %s\" % (sources))\n os.system(\"mkdir %s/temp\" % (sources))\n else:\n if check_temp == False:\n os.system(\"mkdir %s/temp\" % (sources))\n images = glob.glob(residuals + \"/*_residual_.fits\")\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = \"PARAMETERS_NAME\" + \" \" + location + \"/configs/default.param\" + \"\\n\"\n data[20] = \"FILTER_NAME\" + \" \" + location + \"/configs/default.conv\" + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print(\"-> Converting all residual masks into weight maps...\\n\")\n for r in tqdm(images):\n weight = 
weight_map(r)\n hdu = fits.open(r, mode='update')\n data = hdu[0].data\n hdr = hdu[0].header\n try:\n if hdr['WEIGHT'] == 'N':\n hdr.set('WEIGHT','Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n except KeyError:\n hdr.set('WEIGHT','Y')\n hduData = fits.PrimaryHDU(data, header=hdr)\n hduWeight = fits.ImageHDU(weight)\n hduList = fits.HDUList([hduData, hduWeight])\n hduList.writeto(r, overwrite=True)\n hdu.close()\n try:\n if fits.getval(r, 'NORM') == 'N':\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n except KeyError:\n fits.setval(r, 'NORM', value='Y')\n MR.normalize(r)\n print(\"\\n-> SExtracting residual images...\")\n for i in images:\n name = i[length:-5]\n data_name = location + '/data/' + name.replace('residual_','') + '.fits'\n FWHM = psf.fwhm(data_name)\n im_hdu = fits.open(data_name)\n im_header = im_hdu[0].header\n saturate = im_header['SATURATE']\n pixscale = im_header['PIXSCALE']\n im_hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[51] = \"SATUR_LEVEL\" + \" \" + str(saturate) + \"\\n\"\n data[62] = \"SEEING_FWHM\" + \" \" + str(FWHM) + \"\\n\"\n data[106] = \"PSF_NAME\" + \" \" + location + \"/psf/\" + name[:-9] + \".psf\" + \"\\n\"\n data[58] = \"PIXEL_SCALE\" + \" \" + str(pixscale) + \"\\n\"\n data[32] = \"WEIGHT_IMAGE\" + \" \" + \"%s[1]\" % (i) + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system(\"sextractor %s[0]> %s/temp/%s.txt -c %s\" % (i, sources, name, config_loc))\n temp_hdu_data = fits.PrimaryHDU((fits.getdata(i))*-1, header=fits.getheader(i))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(i, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto(\"%s/residuals/temp.fits\" % (location))\n os.system(\"sextractor %s/residuals/temp.fits[0]> %s/temp/%s_2.txt -c %s\" % 
(location, sources, name, config_loc))\n append_negative_sources(i)\n os.remove(\"%s/residuals/temp.fits\" % (location))\n x += 1\n per = float(x)/float(len(images)) * 100\n print(\"\\t %.1f%% sextracted...\" % (per))\n print(\"-> SExtracted %d images, catalogues placed in 'sources' directory\\n\" % (len(images)))\n print(\"-> Filtering source catalogs...\\n\")\n src_join(location)\n filter_sources(location)\n \ndef sextractor_sim(image):\n location = image.split('/')[:-2]\n location = '/'.join(location)\n sources = location + \"/sources\"\n check = os.path.exists(sources)\n check_temp = os.path.exists(sources + '/temp')\n if check == False:\n os.system(\"mkdir %s\" % (sources))\n os.system(\"mkdir %s/temp\" % (sources))\n else:\n if check_temp == False:\n os.system(\"mkdir %s/temp\" % (sources))\n initialize.create_configs(location)\n config_loc = location + '/configs/default.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = \"PARAMETERS_NAME\" + \" \" + location + \"/configs/default.param\" + \"\\n\"\n data[20] = \"FILTER_NAME\" + \" \" + location + \"/configs/default.conv\" + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print(\"\\n-> SExtracting fake image...\")\n name = image.split('/')[-1]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[106] = \"PSF_NAME\" + \" \" + location + \"/psf/\" + name[:-5] + \".psf\" + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system(\"sextractor %s[0]> %s/temp/%s.txt -c %s\" % (image, sources, name, config_loc))\n temp_hdu_data = fits.PrimaryHDU((fits.getdata(image))*-1, header=fits.getheader(image))\n temp_hdu_mask = fits.ImageHDU(fits.getdata(image, 1))\n temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])\n temp_hdu_list.writeto(\"%s/residuals/temp.fits\")\n os.system(\"sextractor %s/residuals/temp.fits[0]> %s/temp/%s.txt -c 
%s\" % (location, sources, name, config_loc))\n os.remove(\"%s/residuals/temp.fits\" % (location))\n src_join(location)\n filter_sources(location)\n \ndef sextractor_psf(location):\n x = 0\n psf_loc = location + \"/psf\"\n data = location + \"/data\"\n templates = location + \"/templates\"\n check = os.path.exists(psf_loc)\n if check == False:\n os.system(\"mkdir %s\" % (psf_loc))\n temps = glob.glob(templates + \"/*.fits\")\n images = glob.glob(data + \"/*_A_.fits\")\n for t in temps:\n images.append(t)\n cats = glob.glob(location + '/psf/*.cat')\n images_names = [(i.split('/')[-1])[:-5] for i in images]\n cats_names = [(c.split('/')[-1])[:-4] for c in cats]\n imageCats = [im for im in images_names if im not in cats_names]\n images = []\n if temps == []:\n temps.append('')\n for imcats in imageCats:\n if imcats == (temps[0].split('/')[-1])[:-5]:\n images.append(temps[0])\n else:\n images.append(location+'/data/'+imcats+'.fits')\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = \"PARAMETERS_NAME\" + \" \" + location + \"/configs/default.psfex\" + \"\\n\"\n data[19] = \"FILTER_NAME\" + \" \" + location + \"/configs/default.conv\" + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print(\"\\n-> Creating PSF catalogs...\")\n if len(temps) == 1:\n for i in images:\n name = i.split('/')[-1][:-5]\n hdu = fits.open(i)\n hdr = hdu[0].header\n pixscale = hdr['PIXSCALE']\n hdu.close()\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6] = \"CATALOG_NAME\" + \" \" + psf_loc + \"/\" + name + \".cat\" + \"\\n\"\n data[44] = \"PIXEL_SCALE\" + \" \" + str(pixscale) + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system(\"sextractor %s[0] -c %s\" % (i, config_loc))\n x += 1\n per = float(x)/float(len(images)) * 100\n 
print(\"\\t %.1f%% sextracted...\" % (per))\n print(\"-> SExtracted %d images, catalogues placed in 'psf' directory\\n\" % (len(images)))\n else:\n print(\"\\n-> Error: Problem with number of template images\\n\")\n sys.exit()\n return images\n\ndef sextractor_psf_sim(location, image):\n psf_loc = location + \"/psf\"\n data = location + \"/data\"\n check = os.path.exists(psf_loc)\n length = len(data) + 1\n if check == False:\n os.system(\"mkdir %s\" % (psf_loc))\n initialize.create_configs(location)\n config_loc = location + '/configs/psf.sex'\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[9] = \"PARAMETERS_NAME\" + \" \" + location + \"/configs/default.psfex\" + \"\\n\"\n data[20] = \"FILTER_NAME\" + \" \" + location + \"/configs/default.conv\" + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n print(\"\\n-> Creating PSF catalog of fake image...\")\n name = image[length:-5]\n with open(config_loc, 'r') as config:\n data = config.readlines()\n config.close()\n data[6] = \"CATALOG_NAME\" + \" \" + psf_loc + \"/\" + name + \".cat\" + \"\\n\"\n with open(config_loc, 'w') as config:\n config.writelines(data)\n config.close()\n os.system(\"sextractor %s[0] -c %s\" % (image, config_loc))\n \ndef weight_map(image):\n hdu = fits.open(image)\n hduMask = hdu[1].data\n zeroMask = np.zeros(hduMask.shape)\n weightMap = (np.logical_not(np.logical_or(hduMask,zeroMask))).astype(float)\n hdu.close()\n return weightMap\n\ndef src_join(location):\n source_loc = location + '/sources'\n temp_source_loc = source_loc + '/temp'\n temp_source_files = glob.glob(temp_source_loc + '/*.txt')\n image_names = filters.get_image_names(location)\n for file in temp_source_files:\n with open(file, 'r') as fl:\n data = fl.readlines()\n data = [str(file.replace('txt','fits')[len(source_loc)+6:]) + '\\n'] + data\n data.append(\"\\n\\n\\n\")\n with open(source_loc + '/sources.txt', 'a+') as s:\n if data[0] not in 
image_names:\n s.writelines(data)\n os.remove(file)\n try:\n os.rmdir(temp_source_loc)\n except:\n print(\"-> Error: Problem removing temp directory in '/sources'\")\n\ndef filter_sources(location, mask_sources=False):\n print(\"\\n-> Filtering out non PSF-like sources...\")\n filters.spread_model_filter(location)\n print(\"-> Filtering out diveted detections...\")\n images = glob.glob(location + '/data/*_A_.fits')\n for i in images:\n indices = filters.divot(i)\n filters.update_filtered_sources(location, indices)\n residuals = glob.glob(\"%s/residuals/*_residual_.fits\" % (location))\n if mask_sources == True:\n for r in residuals:\n filters.mask_sources_image(r)\n \ndef MR_filter_sources(location):\n with open(\"%s/sources/MR_sources.txt\" % (location), 'r') as MR_src:\n MR_lines = MR_src.readlines()\n MR_lines.insert(0, \"MR.fits\\n\")\n with open(\"%s/sources/MR_sources.txt\" % (location), 'w+') as MR_src:\n for line in MR_lines:\n MR_src.write(line)\n MR_loc = \"%s/residuals/MR.fits\" % (location)\n print(\"\\n-> Filtering out non PSF-like sources in master residual...\")\n filters.spread_model_filter(location, MR=True)\n print(\"-> Filtering out diveted detections in master residual...\")\n indices = filters.divot(MR_loc, MR=True)\n filters.update_filtered_sources(location, indices, MR=True)\n filters.write_total_sources(location)\n \ndef append_negative_sources(residual, MR=False):\n location = residual.split('/')[:-2]\n location = '/'.join(location)\n name = residual.split('/')[-1]\n name = name.replace('.fits', '')\n if MR == True:\n with open(\"%s/sources/%s_sources_2.txt\" % (location, name), 'r') as neg_sources:\n lines = neg_sources.readlines()\n with open(\"%s/sources/%s_sources.txt\" % (location, name), 'a') as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove(\"%s/sources/%s_sources_2.txt\" % (location, name))\n else:\n with open(\"%s/sources/temp/%s_2.txt\" % (location, name), 'r') as neg_sources:\n lines = 
neg_sources.readlines()\n with open(\"%s/sources/temp/%s.txt\" % (location, name), 'a') as sources:\n for l in lines:\n if l[0] != '#':\n sources.write(l)\n os.remove(\"%s/sources/temp/%s_2.txt\" % (location, name))",
"step-ids": [
8,
9,
10,
11,
12
]
}
|
[
8,
9,
10,
11,
12
] |
<|reserved_special_token_0|>
class Mov_ZigZag(AbstractMoviment):
<|reserved_special_token_0|>
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
if startcoordinate[0] + ZigZageamento >= coordinates[0
] and self.direct:
coordinates[0] = round(coordinates[0] + speed * dt)
elif startcoordinate[0] - ZigZageamento <= coordinates[0
] and not self.direct:
coordinates[0] = round(coordinates[0] - speed * dt)
else:
self.direct = not self.direct
return coordinates, speed
class Mov_DiagRight(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)
return coordinates, speed
class Mov_DiagLeft(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)
return coordinates, speed
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Mov_ZigZag(AbstractMoviment):
direct = True
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
if startcoordinate[0] + ZigZageamento >= coordinates[0
] and self.direct:
coordinates[0] = round(coordinates[0] + speed * dt)
elif startcoordinate[0] - ZigZageamento <= coordinates[0
] and not self.direct:
coordinates[0] = round(coordinates[0] - speed * dt)
else:
self.direct = not self.direct
return coordinates, speed
class Mov_DiagRight(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)
return coordinates, speed
class Mov_DiagLeft(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)
return coordinates, speed
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Mov_LinearFall(AbstractMoviment):
def move(self, coordinates, speed, lastcoordinate, dt):
coordinates[1] = round(coordinates[1] + speed * dt)
return coordinates, speed
class Mov_ZigZag(AbstractMoviment):
direct = True
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
if startcoordinate[0] + ZigZageamento >= coordinates[0
] and self.direct:
coordinates[0] = round(coordinates[0] + speed * dt)
elif startcoordinate[0] - ZigZageamento <= coordinates[0
] and not self.direct:
coordinates[0] = round(coordinates[0] - speed * dt)
else:
self.direct = not self.direct
return coordinates, speed
class Mov_DiagRight(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)
return coordinates, speed
class Mov_DiagLeft(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)
return coordinates, speed
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class AbstractMoviment(ABC):
<|reserved_special_token_0|>
class Mov_LinearFall(AbstractMoviment):
def move(self, coordinates, speed, lastcoordinate, dt):
coordinates[1] = round(coordinates[1] + speed * dt)
return coordinates, speed
class Mov_ZigZag(AbstractMoviment):
direct = True
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
if startcoordinate[0] + ZigZageamento >= coordinates[0
] and self.direct:
coordinates[0] = round(coordinates[0] + speed * dt)
elif startcoordinate[0] - ZigZageamento <= coordinates[0
] and not self.direct:
coordinates[0] = round(coordinates[0] - speed * dt)
else:
self.direct = not self.direct
return coordinates, speed
class Mov_DiagRight(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)
return coordinates, speed
class Mov_DiagLeft(AbstractMoviment):
def __init__(self, x_speed):
self.x_speed = x_speed
def move(self, coordinates, speed, startcoordinate, dt):
ZigZageamento = 100
coordinates[1] = round(coordinates[1] + speed * dt)
coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)
return coordinates, speed
<|reserved_special_token_1|>
from __future__ import annotations
from abc import ABC, abstractmethod
class AbstractMoviment(ABC):
    """Strategy interface for enemy movement patterns.

    Concrete subclasses implement :meth:`move`, which advances a pair of
    screen coordinates by one simulation step and returns the (possibly
    mutated) coordinates together with the current speed.
    """

    @abstractmethod
    def move(self, coordinates, speed, startcoordinate, dt):
        """Advance ``coordinates`` by one frame of ``dt`` seconds.

        Parameters
        ----------
        coordinates : list
            Current ``[x, y]`` position; implementations mutate it in place.
        speed : float
            Movement speed in pixels per second.
        startcoordinate : list
            Spawn position, used by patterns that oscillate around it.
        dt : float
            Elapsed time since the previous frame, in seconds.

        Returns
        -------
        tuple
            ``(coordinates, speed)`` after the update.
        """
        # NOTE(review): the abstract signature used to be ``move(self, dt)``,
        # which no subclass honoured; it is aligned here with every concrete
        # implementation in this module.
class Mov_LinearFall(AbstractMoviment):
    """Straight vertical fall: only the y coordinate advances each frame."""

    def move(self, coordinates, speed, lastcoordinate, dt):
        """Drop by ``speed * dt`` pixels; the x coordinate is untouched.

        ``coordinates`` is mutated in place and returned with ``speed``.
        """
        dy = speed * dt
        coordinates[1] = round(coordinates[1] + dy)
        return coordinates, speed
class Mov_ZigZag(AbstractMoviment):
    """Fall while oscillating horizontally around the spawn x position.

    The ship descends every frame and slides sideways, reversing its
    horizontal direction whenever it drifts more than ``amplitude``
    pixels from where it spawned.
    """

    # Class-level default kept for backward compatibility; ``self.direct = ...``
    # in move() shadows it with a per-instance attribute after the first flip.
    direct = True

    def __init__(self, amplitude=100):
        # Maximum horizontal drift (pixels) from the spawn x before the
        # direction reverses; 100 matches the previously hard-coded value.
        self.amplitude = amplitude

    def move(self, coordinates, speed, startcoordinate, dt):
        """Advance one frame: constant fall plus bounded horizontal sweep.

        ``coordinates`` is mutated in place and returned along with
        ``speed``.  On the frame a boundary is crossed only the direction
        flips; the horizontal position resumes moving on the next frame.
        """
        coordinates[1] = round(coordinates[1] + speed * dt)
        if startcoordinate[0] + self.amplitude >= coordinates[0] and self.direct:
            # heading right and still inside the right boundary
            coordinates[0] = round(coordinates[0] + speed * dt)
        elif startcoordinate[0] - self.amplitude <= coordinates[0] and not self.direct:
            # heading left and still inside the left boundary
            coordinates[0] = round(coordinates[0] - speed * dt)
        else:
            self.direct = not self.direct
        return coordinates, speed
class Mov_DiagRight(AbstractMoviment):
    """Fall along a down-right diagonal.

    ``x_speed`` is the horizontal speed factor relative to ``speed``;
    per the original author's note, ~0.17 (sin of 10 degrees) works well.
    """

    def __init__(self, x_speed):
        # Horizontal factor applied to ``speed``; 0.17 ~ sin(10 degrees).
        self.x_speed = x_speed

    def move(self, coordinates, speed, startcoordinate, dt):
        """Advance one frame diagonally; mutates and returns ``coordinates``.

        Removed the unused local ``ZigZageamento`` left over from the
        zig-zag pattern — this movement has no horizontal bound.
        """
        coordinates[1] = round(coordinates[1] + speed * dt)
        coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)
        return coordinates, speed
class Mov_DiagLeft(AbstractMoviment):
    """Fall along a down-left diagonal.

    ``x_speed`` is the horizontal speed factor relative to ``speed``;
    per the original author's note, ~0.17 (sin of 10 degrees) works well.
    """

    def __init__(self, x_speed):
        # Horizontal factor applied to ``speed``; 0.17 ~ sin(10 degrees).
        self.x_speed = x_speed

    def move(self, coordinates, speed, startcoordinate, dt):
        """Advance one frame diagonally; mutates and returns ``coordinates``.

        Removed the unused local ``ZigZageamento`` left over from the
        zig-zag pattern — this movement has no horizontal bound.
        """
        coordinates[1] = round(coordinates[1] + speed * dt)
        coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)
        return coordinates, speed
|
flexible
|
{
"blob_id": "57935b560108ef0db59de9eee59aa0c908c58b8f",
"index": 2348,
"step-1": "<mask token>\n\n\nclass Mov_ZigZag(AbstractMoviment):\n <mask token>\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n if startcoordinate[0] + ZigZageamento >= coordinates[0\n ] and self.direct:\n coordinates[0] = round(coordinates[0] + speed * dt)\n elif startcoordinate[0] - ZigZageamento <= coordinates[0\n ] and not self.direct:\n coordinates[0] = round(coordinates[0] - speed * dt)\n else:\n self.direct = not self.direct\n return coordinates, speed\n\n\nclass Mov_DiagRight(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)\n return coordinates, speed\n\n\nclass Mov_DiagLeft(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)\n return coordinates, speed\n",
"step-2": "<mask token>\n\n\nclass Mov_ZigZag(AbstractMoviment):\n direct = True\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n if startcoordinate[0] + ZigZageamento >= coordinates[0\n ] and self.direct:\n coordinates[0] = round(coordinates[0] + speed * dt)\n elif startcoordinate[0] - ZigZageamento <= coordinates[0\n ] and not self.direct:\n coordinates[0] = round(coordinates[0] - speed * dt)\n else:\n self.direct = not self.direct\n return coordinates, speed\n\n\nclass Mov_DiagRight(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)\n return coordinates, speed\n\n\nclass Mov_DiagLeft(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)\n return coordinates, speed\n",
"step-3": "<mask token>\n\n\nclass Mov_LinearFall(AbstractMoviment):\n\n def move(self, coordinates, speed, lastcoordinate, dt):\n coordinates[1] = round(coordinates[1] + speed * dt)\n return coordinates, speed\n\n\nclass Mov_ZigZag(AbstractMoviment):\n direct = True\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n if startcoordinate[0] + ZigZageamento >= coordinates[0\n ] and self.direct:\n coordinates[0] = round(coordinates[0] + speed * dt)\n elif startcoordinate[0] - ZigZageamento <= coordinates[0\n ] and not self.direct:\n coordinates[0] = round(coordinates[0] - speed * dt)\n else:\n self.direct = not self.direct\n return coordinates, speed\n\n\nclass Mov_DiagRight(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)\n return coordinates, speed\n\n\nclass Mov_DiagLeft(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)\n return coordinates, speed\n",
"step-4": "<mask token>\n\n\nclass AbstractMoviment(ABC):\n <mask token>\n\n\nclass Mov_LinearFall(AbstractMoviment):\n\n def move(self, coordinates, speed, lastcoordinate, dt):\n coordinates[1] = round(coordinates[1] + speed * dt)\n return coordinates, speed\n\n\nclass Mov_ZigZag(AbstractMoviment):\n direct = True\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n if startcoordinate[0] + ZigZageamento >= coordinates[0\n ] and self.direct:\n coordinates[0] = round(coordinates[0] + speed * dt)\n elif startcoordinate[0] - ZigZageamento <= coordinates[0\n ] and not self.direct:\n coordinates[0] = round(coordinates[0] - speed * dt)\n else:\n self.direct = not self.direct\n return coordinates, speed\n\n\nclass Mov_DiagRight(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] + speed * self.x_speed * dt)\n return coordinates, speed\n\n\nclass Mov_DiagLeft(AbstractMoviment):\n\n def __init__(self, x_speed):\n self.x_speed = x_speed\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100\n coordinates[1] = round(coordinates[1] + speed * dt)\n coordinates[0] = round(coordinates[0] - speed * self.x_speed * dt)\n return coordinates, speed\n",
"step-5": "from __future__ import annotations\nfrom abc import ABC, abstractmethod\n\n\nclass AbstractMoviment(ABC):\n @abstractmethod\n def move(self, dt) -> None:\n pass\n\n\nclass Mov_LinearFall(AbstractMoviment):\n def move(self, coordinates, speed, lastcoordinate, dt):\n coordinates[1] = round(coordinates[1] + speed * dt)\n return coordinates, speed\n\n\nclass Mov_ZigZag(AbstractMoviment):\n direct = True\n\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100 # variacao max da nave\n coordinates[1] = round(coordinates[1] + speed * dt)\n\n if (startcoordinate[0] + ZigZageamento >= coordinates[0]) and (\n self.direct): # se ele tava na esquerda vai pra direita\n coordinates[0] = round(coordinates[0] + speed * dt)\n\n elif (startcoordinate[0] - ZigZageamento <= coordinates[0]) and (not self.direct):\n coordinates[0] = round(coordinates[0] - speed * dt)\n\n else:\n self.direct = not self.direct\n\n return coordinates, speed\n\nclass Mov_DiagRight(AbstractMoviment):\n def __init__(self, x_speed):\n self.x_speed = x_speed # seno do angulo, .17 é bom\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100 # variacao max da nave\n coordinates[1] = round(coordinates[1] + speed * dt)\n # sin(10 degrees) = .17\n coordinates[0] = round(coordinates[0] + speed*self.x_speed * dt)\n\n return coordinates, speed\n\nclass Mov_DiagLeft(AbstractMoviment):\n def __init__(self, x_speed):\n self.x_speed = x_speed # seno do angulo, .17 é bom\n def move(self, coordinates, speed, startcoordinate, dt):\n ZigZageamento = 100 # variacao max da nave\n coordinates[1] = round(coordinates[1] + speed * dt)\n # sin(10 degrees) = .17\n coordinates[0] = round(coordinates[0] - speed*self.x_speed * dt)\n\n return coordinates, speed",
"step-ids": [
8,
9,
11,
12,
15
]
}
|
[
8,
9,
11,
12,
15
] |
<|reserved_special_token_0|>
class CastingAgencyTestCase(unittest.TestCase):
<|reserved_special_token_0|>
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_name = os.environ.get('TEST_DATABASE_NAME',
'abc123abc1234')
self.database_path = 'postgres://postgres:postgres@{}/{}'.format(
'localhost:5432', self.database_name)
setup_db(self.app, self.database_path)
setup_db_for_test()
self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +
CASTING_ASSISTANT_TOKEN}
self.casting_director_auth_header = {'Authorization': 'Bearer ' +
CASTING_DIRECTOR_TOKEN}
self.executive_producer_auth_header = {'Authorization': 'Bearer ' +
EXECUTIVE_PRODUCER_TOKEN}
self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,
'gender': 'Male'}
self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}
self.create_movie_success = {'title': 'Captain America: Civil War',
'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}
self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}
self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': []}
self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': [100]}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_create_actors_success_producer(self):
res = self.client().post('/actors', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_create_actors_401_failure_assistant(self):
res = self.client().post('/actors', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
<|reserved_special_token_0|>
def test_update_actors_success_producer(self):
res = self.client().patch('/actors/1', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_success_director(self):
res = self.client().patch('/actors/1', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_401_failure_assistant(self):
res = self.client().patch('/actors/1', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
<|reserved_special_token_0|>
def test_delete_actors_success_producer(self):
res = self.client().delete('/actors/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_success_director(self):
res = self.client().delete('/actors/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/actors/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/actors/100', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_create_movies_success_producer(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
def test_create_movies_401_failure_assistant(self):
res = self.client().post('/movies', headers=self.
casting_assistant_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_update_movies_success_director(self):
res = self.client().patch('/movies/1', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_delete_movies_success_producer(self):
res = self.client().delete('/movies/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['movie_id'], 1)
def test_delete_movies_401_failure_director(self):
res = self.client().delete('/movies/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/movies/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/movies/100', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_actors_by_movies(self):
res = self.client().get('/movies/1/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actors']))
def test_404_get_actors_by_movies(self):
res = self.client().get('/movies/100/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
def test_404_get_movies_by_actors(self):
res = self.client().get('/actors/100/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CastingAgencyTestCase(unittest.TestCase):
<|reserved_special_token_0|>
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_name = os.environ.get('TEST_DATABASE_NAME',
'abc123abc1234')
self.database_path = 'postgres://postgres:postgres@{}/{}'.format(
'localhost:5432', self.database_name)
setup_db(self.app, self.database_path)
setup_db_for_test()
self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +
CASTING_ASSISTANT_TOKEN}
self.casting_director_auth_header = {'Authorization': 'Bearer ' +
CASTING_DIRECTOR_TOKEN}
self.executive_producer_auth_header = {'Authorization': 'Bearer ' +
EXECUTIVE_PRODUCER_TOKEN}
self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,
'gender': 'Male'}
self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}
self.create_movie_success = {'title': 'Captain America: Civil War',
'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}
self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}
self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': []}
self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': [100]}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
def test_get_actors(self):
res = self.client().get('/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actors']))
<|reserved_special_token_0|>
def test_create_actors_success_producer(self):
res = self.client().post('/actors', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_create_actors_401_failure_assistant(self):
res = self.client().post('/actors', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_422_if_create_actor_fails(self):
res = self.client().post('/actors', headers=self.
executive_producer_auth_header, json=self.create_actor_fail)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_update_actors_success_producer(self):
res = self.client().patch('/actors/1', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_success_director(self):
res = self.client().patch('/actors/1', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_401_failure_assistant(self):
res = self.client().patch('/actors/1', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_update_actors_404_failure(self):
res = self.client().patch('/actors/100', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_delete_actors_success_producer(self):
res = self.client().delete('/actors/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_success_director(self):
res = self.client().delete('/actors/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/actors/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/actors/100', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_movies(self):
res = self.client().get('/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movies']))
def test_create_movies_401_failure_director(self):
res = self.client().post('/movies', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_create_movies_success_producer(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
def test_create_movies_401_failure_assistant(self):
res = self.client().post('/movies', headers=self.
casting_assistant_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_422_create_movie_fails_incomplete_info(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_1)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_422_create_movie_fails_no_actor_input_info(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_2)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_404_create_movie_fails_wrong_actor_id(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_3)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
def test_update_movies_success_director(self):
res = self.client().patch('/movies/1', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
<|reserved_special_token_0|>
def test_update_movies_404_failure(self):
res = self.client().patch('/movies/100', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_delete_movies_success_producer(self):
res = self.client().delete('/movies/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['movie_id'], 1)
def test_delete_movies_401_failure_director(self):
res = self.client().delete('/movies/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/movies/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/movies/100', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_actors_by_movies(self):
res = self.client().get('/movies/1/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actors']))
def test_404_get_actors_by_movies(self):
res = self.client().get('/movies/100/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
def test_404_get_movies_by_actors(self):
res = self.client().get('/actors/100/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CastingAgencyTestCase(unittest.TestCase):
<|reserved_special_token_0|>
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_name = os.environ.get('TEST_DATABASE_NAME',
'abc123abc1234')
self.database_path = 'postgres://postgres:postgres@{}/{}'.format(
'localhost:5432', self.database_name)
setup_db(self.app, self.database_path)
setup_db_for_test()
self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +
CASTING_ASSISTANT_TOKEN}
self.casting_director_auth_header = {'Authorization': 'Bearer ' +
CASTING_DIRECTOR_TOKEN}
self.executive_producer_auth_header = {'Authorization': 'Bearer ' +
EXECUTIVE_PRODUCER_TOKEN}
self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,
'gender': 'Male'}
self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}
self.create_movie_success = {'title': 'Captain America: Civil War',
'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}
self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}
self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': []}
self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': [100]}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
def test_get_actors(self):
res = self.client().get('/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actors']))
<|reserved_special_token_0|>
def test_create_actors_success_producer(self):
res = self.client().post('/actors', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_create_actors_401_failure_assistant(self):
res = self.client().post('/actors', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_422_if_create_actor_fails(self):
res = self.client().post('/actors', headers=self.
executive_producer_auth_header, json=self.create_actor_fail)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_update_actors_success_producer(self):
res = self.client().patch('/actors/1', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_success_director(self):
res = self.client().patch('/actors/1', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_401_failure_assistant(self):
res = self.client().patch('/actors/1', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_update_actors_404_failure(self):
res = self.client().patch('/actors/100', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_delete_actors_success_producer(self):
res = self.client().delete('/actors/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_success_director(self):
res = self.client().delete('/actors/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/actors/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/actors/100', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_movies(self):
res = self.client().get('/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movies']))
def test_create_movies_401_failure_director(self):
res = self.client().post('/movies', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_create_movies_success_producer(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
def test_create_movies_401_failure_assistant(self):
res = self.client().post('/movies', headers=self.
casting_assistant_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_422_create_movie_fails_incomplete_info(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_1)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_422_create_movie_fails_no_actor_input_info(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_2)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_404_create_movie_fails_wrong_actor_id(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_3)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
def test_update_movies_success_director(self):
res = self.client().patch('/movies/1', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
<|reserved_special_token_0|>
def test_update_movies_404_failure(self):
res = self.client().patch('/movies/100', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_delete_movies_success_producer(self):
res = self.client().delete('/movies/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['movie_id'], 1)
def test_delete_movies_401_failure_director(self):
res = self.client().delete('/movies/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/movies/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/movies/100', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_actors_by_movies(self):
res = self.client().get('/movies/1/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actors']))
def test_404_get_actors_by_movies(self):
res = self.client().get('/movies/100/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_movies_by_actors(self):
res = self.client().get('/actors/1/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movies']))
def test_404_get_movies_by_actors(self):
res = self.client().get('/actors/100/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CastingAgencyTestCase(unittest.TestCase):
"""This class has the test cases for casting agency web app endpoints"""
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_name = os.environ.get('TEST_DATABASE_NAME',
'abc123abc1234')
self.database_path = 'postgres://postgres:postgres@{}/{}'.format(
'localhost:5432', self.database_name)
setup_db(self.app, self.database_path)
setup_db_for_test()
self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +
CASTING_ASSISTANT_TOKEN}
self.casting_director_auth_header = {'Authorization': 'Bearer ' +
CASTING_DIRECTOR_TOKEN}
self.executive_producer_auth_header = {'Authorization': 'Bearer ' +
EXECUTIVE_PRODUCER_TOKEN}
self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,
'gender': 'Male'}
self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}
self.create_movie_success = {'title': 'Captain America: Civil War',
'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}
self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}
self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': []}
self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',
'release_date': '27/04/2018', 'actors_ids': [100]}
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
self.db.create_all()
def test_get_actors(self):
res = self.client().get('/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actors']))
def test_create_actors_success_director(self):
res = self.client().post('/actors', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_create_actors_success_producer(self):
res = self.client().post('/actors', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_create_actors_401_failure_assistant(self):
res = self.client().post('/actors', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_422_if_create_actor_fails(self):
res = self.client().post('/actors', headers=self.
executive_producer_auth_header, json=self.create_actor_fail)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_update_actors_success_producer(self):
res = self.client().patch('/actors/1', headers=self.
executive_producer_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_success_director(self):
res = self.client().patch('/actors/1', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actor']))
def test_update_actors_401_failure_assistant(self):
res = self.client().patch('/actors/1', headers=self.
casting_assistant_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_update_actors_404_failure(self):
res = self.client().patch('/actors/100', headers=self.
casting_director_auth_header, json=self.create_actor_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_delete_actors_success_producer(self):
res = self.client().delete('/actors/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_success_director(self):
res = self.client().delete('/actors/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['actor_id'], 1)
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/actors/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/actors/100', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_movies(self):
res = self.client().get('/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movies']))
def test_create_movies_401_failure_director(self):
res = self.client().post('/movies', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_create_movies_success_producer(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
def test_create_movies_401_failure_assistant(self):
res = self.client().post('/movies', headers=self.
casting_assistant_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_422_create_movie_fails_incomplete_info(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_1)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_422_create_movie_fails_no_actor_input_info(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_2)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'unprocessable')
def test_404_create_movie_fails_wrong_actor_id(self):
res = self.client().post('/movies', headers=self.
executive_producer_auth_header, json=self.create_movie_fail_3)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_update_movies_success_producer(self):
res = self.client().patch('/movies/1', headers=self.
executive_producer_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
def test_update_movies_success_director(self):
res = self.client().patch('/movies/1', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movie']))
def test_update_movies_401_failure_assistant(self):
res = self.client().patch('/movies/1', headers=self.
casting_assistant_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_update_movies_404_failure(self):
res = self.client().patch('/movies/100', headers=self.
casting_director_auth_header, json=self.create_movie_success)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_delete_movies_success_producer(self):
res = self.client().delete('/movies/1', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertEqual(res_data['movie_id'], 1)
def test_delete_movies_401_failure_director(self):
res = self.client().delete('/movies/1', headers=self.
casting_director_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete('/movies/1', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'Permission missing.')
def test_delete_actors_404_failure(self):
res = self.client().delete('/movies/100', headers=self.
executive_producer_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_actors_by_movies(self):
res = self.client().get('/movies/1/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['actors']))
def test_404_get_actors_by_movies(self):
res = self.client().get('/movies/100/actors', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
def test_get_movies_by_actors(self):
res = self.client().get('/actors/1/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data['success'], True)
self.assertTrue(len(res_data['movies']))
def test_404_get_movies_by_actors(self):
res = self.client().get('/actors/100/movies', headers=self.
casting_assistant_auth_header)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data['success'], False)
self.assertEqual(res_data['message'], 'resource not found')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import os
import unittest
import json
from flask_sqlalchemy import SQLAlchemy
from app import create_app
from models import *
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())

# Role-scoped JWTs used for the RBAC tests. Refresh the values in setup.sh
# before a test run (see the README for details). The literal fallback is a
# deliberately invalid placeholder used only when the variable is absent.
_TOKEN_FALLBACK = "abc123abc1234"

CASTING_ASSISTANT_TOKEN = os.environ.get(
    "CASTING_ASSISTANT_TOKEN", _TOKEN_FALLBACK
)
CASTING_DIRECTOR_TOKEN = os.environ.get(
    "CASTING_DIRECTOR_TOKEN", _TOKEN_FALLBACK
)
EXECUTIVE_PRODUCER_TOKEN = os.environ.get(
    "EXECUTIVE_PRODUCER_TOKEN", _TOKEN_FALLBACK
)
class CastingAgencyTestCase(unittest.TestCase):
"""This class has the test cases for casting agency web app endpoints"""
def setUp(self):
"""Define test variables and initialize app."""
self.app = create_app()
self.client = self.app.test_client
self.database_name = os.environ.get(
"TEST_DATABASE_NAME",
"abc123abc1234"
)
self.database_path = "postgres://postgres:postgres@{}/{}".format(
"localhost:5432", self.database_name
)
setup_db(self.app, self.database_path)
# drop db, create and populate with test data
setup_db_for_test()
self.casting_assistant_auth_header = {
"Authorization": "Bearer " + CASTING_ASSISTANT_TOKEN
}
self.casting_director_auth_header = {
"Authorization": "Bearer " + CASTING_DIRECTOR_TOKEN
}
self.executive_producer_auth_header = {
"Authorization": "Bearer " + EXECUTIVE_PRODUCER_TOKEN
}
self.create_actor_success = {
"name": "Chris Hemsworth",
"age": 37,
"gender": "Male",
}
self.create_actor_fail = {
"name": "Chris Evans",
"age": 39,
}
self.create_movie_success = {
"title": "Captain America: Civil War",
"release_date": "12/04/2016",
"actors_ids": [1, 2, 3],
}
self.create_movie_fail_1 = {
"title": "Avenger: Infinity War",
}
self.create_movie_fail_2 = {
"title": "Avenger: Infinity War",
"release_date": "27/04/2018",
"actors_ids": [],
}
self.create_movie_fail_3 = {
"title": "Avenger: Infinity War",
"release_date": "27/04/2018",
"actors_ids": [100],
}
# binds the app to the current context
with self.app.app_context():
self.db = SQLAlchemy()
self.db.init_app(self.app)
# create all tables
self.db.create_all()
# test get actors endpoint
def test_get_actors(self):
res = self.client().get(
"/actors",
headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["actors"]))
# test create actor endpoint with casting director auth token
def test_create_actors_success_director(self):
res = self.client().post(
"/actors",
headers=self.casting_director_auth_header,
json=self.create_actor_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["actor"]))
# test create actor endpoint with executive producer auth token
def test_create_actors_success_producer(self):
res = self.client().post(
"/actors",
headers=self.executive_producer_auth_header,
json=self.create_actor_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["actor"]))
# create actor fails due authentication failure with casting
# assistant auth token
def test_create_actors_401_failure_assistant(self):
res = self.client().post(
"/actors",
headers=self.casting_assistant_auth_header,
json=self.create_actor_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# create actor fails due to incomplete input
def test_422_if_create_actor_fails(self):
res = self.client().post(
"/actors",
headers=self.executive_producer_auth_header,
json=self.create_actor_fail,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "unprocessable")
# test update actors with executive producer auth token
def test_update_actors_success_producer(self):
res = self.client().patch(
"/actors/1",
headers=self.executive_producer_auth_header,
json=self.create_actor_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["actor"]))
# test update actors with casting director auth token
def test_update_actors_success_director(self):
res = self.client().patch(
"/actors/1",
headers=self.casting_director_auth_header,
json=self.create_actor_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["actor"]))
# update actor fails due authentication failure
# with casting assitant auth token
def test_update_actors_401_failure_assistant(self):
res = self.client().patch(
"/actors/1",
headers=self.casting_assistant_auth_header,
json=self.create_actor_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# test update actor faiure if actor with id doesnot
# exists in database
def test_update_actors_404_failure(self):
res = self.client().patch(
"/actors/100",
headers=self.casting_director_auth_header,
json=self.create_actor_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "resource not found")
# test successfull delete actor with executive producer auth token
def test_delete_actors_success_producer(self):
res = self.client().delete(
"/actors/1", headers=self.executive_producer_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertEqual(res_data["actor_id"], 1)
# test successfull delete actor with casting director auth token
def test_delete_actors_success_director(self):
res = self.client().delete(
"/actors/1", headers=self.casting_director_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertEqual(res_data["actor_id"], 1)
# delete actor fails due authentication failure
# with casting director auth token
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete(
"/actors/1", headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# delete actor failure if actor with input
# id doesnot exits
def test_delete_actors_404_failure(self):
res = self.client().delete(
"/actors/100", headers=self.casting_director_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "resource not found")
# test get movie endpoint
def test_get_movies(self):
res = self.client().get(
"/movies",
headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["movies"]))
# test create movie authentication failure
# with casting director auth token
def test_create_movies_401_failure_director(self):
res = self.client().post(
"/movies",
headers=self.casting_director_auth_header,
json=self.create_movie_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# test create movies success with executive producer
# auth token
def test_create_movies_success_producer(self):
res = self.client().post(
"/movies",
headers=self.executive_producer_auth_header,
json=self.create_movie_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["movie"]))
# create actor fails due authentication failure
# with casting assistant auth token
def test_create_movies_401_failure_assistant(self):
res = self.client().post(
"/movies",
headers=self.casting_assistant_auth_header,
json=self.create_movie_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# create actor fails due to incomplete input
def test_422_create_movie_fails_incomplete_info(self):
res = self.client().post(
"/movies",
headers=self.executive_producer_auth_header,
json=self.create_movie_fail_1,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "unprocessable")
# create movie fails due to incomplete input, no input actor ids
def test_422_create_movie_fails_no_actor_input_info(self):
res = self.client().post(
"/movies",
headers=self.executive_producer_auth_header,
json=self.create_movie_fail_2,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 422)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "unprocessable")
# create movie fails due to wrong actor id input
def test_404_create_movie_fails_wrong_actor_id(self):
res = self.client().post(
"/movies",
headers=self.executive_producer_auth_header,
json=self.create_movie_fail_3,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "resource not found")
# test update movie success with executive producer
# auth token
def test_update_movies_success_producer(self):
res = self.client().patch(
"/movies/1",
headers=self.executive_producer_auth_header,
json=self.create_movie_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["movie"]))
# test update movies success with casting
# director auth token
def test_update_movies_success_director(self):
res = self.client().patch(
"/movies/1",
headers=self.casting_director_auth_header,
json=self.create_movie_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["movie"]))
# update actor fails due authentication failure
# with casting assitant auth token
def test_update_movies_401_failure_assistant(self):
res = self.client().patch(
"/movies/1",
headers=self.casting_assistant_auth_header,
json=self.create_movie_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# test update movies failure if movie with
# input id does not exists
def test_update_movies_404_failure(self):
res = self.client().patch(
"/movies/100",
headers=self.casting_director_auth_header,
json=self.create_movie_success,
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "resource not found")
# test delete movies success with executive producer
# auth token
def test_delete_movies_success_producer(self):
res = self.client().delete(
"/movies/1", headers=self.executive_producer_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertEqual(res_data["movie_id"], 1)
# test delete movies failure with casting director
# auth token
def test_delete_movies_401_failure_director(self):
res = self.client().delete(
"/movies/1", headers=self.casting_director_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# test delete actor fails due authentication failure
# with casting assitant auth token
def test_delete_actors_401_failure_assistant(self):
res = self.client().delete(
"/movies/1", headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 401)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "Permission missing.")
# test delete actor failure if actor with input id
# doesnot exists
def test_delete_actors_404_failure(self):
res = self.client().delete(
"/movies/100", headers=self.executive_producer_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "resource not found")
# test get actor by movies success
def test_get_actors_by_movies(self):
res = self.client().get(
"/movies/1/actors", headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["actors"]))
# test get actor by movies failure if movie
# with input id does not exits
def test_404_get_actors_by_movies(self):
res = self.client().get(
"/movies/100/actors", headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "resource not found")
# test get movies by actor success
def test_get_movies_by_actors(self):
res = self.client().get(
"/actors/1/movies", headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 200)
self.assertEqual(res_data["success"], True)
self.assertTrue(len(res_data["movies"]))
# test get movies by actor failure if actor
# with input id does not exists
def test_404_get_movies_by_actors(self):
res = self.client().get(
"/actors/100/movies", headers=self.casting_assistant_auth_header
)
res_data = json.loads(res.data)
self.assertEqual(res.status_code, 404)
self.assertEqual(res_data["success"], False)
self.assertEqual(res_data["message"], "resource not found")
# Make the tests conveniently executable: running this module directly
# invokes the unittest runner, which discovers and runs every test_*
# method defined above.
if __name__ == "__main__":
    unittest.main()
|
flexible
|
{
"blob_id": "bae4eb94d561f7aa810718840ff7c2de52cb0d6f",
"index": 3228,
"step-1": "<mask token>\n\n\nclass CastingAgencyTestCase(unittest.TestCase):\n <mask token>\n\n def setUp(self):\n \"\"\"Define test variables and initialize app.\"\"\"\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = os.environ.get('TEST_DATABASE_NAME',\n 'abc123abc1234')\n self.database_path = 'postgres://postgres:postgres@{}/{}'.format(\n 'localhost:5432', self.database_name)\n setup_db(self.app, self.database_path)\n setup_db_for_test()\n self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +\n CASTING_ASSISTANT_TOKEN}\n self.casting_director_auth_header = {'Authorization': 'Bearer ' +\n CASTING_DIRECTOR_TOKEN}\n self.executive_producer_auth_header = {'Authorization': 'Bearer ' +\n EXECUTIVE_PRODUCER_TOKEN}\n self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,\n 'gender': 'Male'}\n self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}\n self.create_movie_success = {'title': 'Captain America: Civil War',\n 'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}\n self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}\n self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': []}\n self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': [100]}\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n self.db.create_all()\n <mask token>\n <mask token>\n\n def test_create_actors_success_producer(self):\n res = self.client().post('/actors', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_create_actors_401_failure_assistant(self):\n res = self.client().post('/actors', headers=self.\n casting_assistant_auth_header, json=self.create_actor_success)\n res_data = 
json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n <mask token>\n\n def test_update_actors_success_producer(self):\n res = self.client().patch('/actors/1', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_success_director(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_401_failure_assistant(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_assistant_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n <mask token>\n\n def test_delete_actors_success_producer(self):\n res = self.client().delete('/actors/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_success_director(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_assistant_auth_header)\n res_data = 
json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/actors/100', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n <mask token>\n <mask token>\n\n def test_create_movies_success_producer(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n\n def test_create_movies_401_failure_assistant(self):\n res = self.client().post('/movies', headers=self.\n casting_assistant_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_update_movies_success_director(self):\n res = self.client().patch('/movies/1', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n <mask token>\n <mask token>\n\n def test_delete_movies_success_producer(self):\n res = self.client().delete('/movies/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['movie_id'], 1)\n\n def test_delete_movies_401_failure_director(self):\n 
res = self.client().delete('/movies/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/movies/1', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/movies/100', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_actors_by_movies(self):\n res = self.client().get('/movies/1/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actors']))\n\n def test_404_get_actors_by_movies(self):\n res = self.client().get('/movies/100/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n <mask token>\n\n def test_404_get_movies_by_actors(self):\n res = self.client().get('/actors/100/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CastingAgencyTestCase(unittest.TestCase):\n <mask token>\n\n def setUp(self):\n \"\"\"Define test variables and initialize app.\"\"\"\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = os.environ.get('TEST_DATABASE_NAME',\n 'abc123abc1234')\n self.database_path = 'postgres://postgres:postgres@{}/{}'.format(\n 'localhost:5432', self.database_name)\n setup_db(self.app, self.database_path)\n setup_db_for_test()\n self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +\n CASTING_ASSISTANT_TOKEN}\n self.casting_director_auth_header = {'Authorization': 'Bearer ' +\n CASTING_DIRECTOR_TOKEN}\n self.executive_producer_auth_header = {'Authorization': 'Bearer ' +\n EXECUTIVE_PRODUCER_TOKEN}\n self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,\n 'gender': 'Male'}\n self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}\n self.create_movie_success = {'title': 'Captain America: Civil War',\n 'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}\n self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}\n self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': []}\n self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': [100]}\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n self.db.create_all()\n\n def test_get_actors(self):\n res = self.client().get('/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actors']))\n <mask token>\n\n def test_create_actors_success_producer(self):\n res = self.client().post('/actors', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n 
self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_create_actors_401_failure_assistant(self):\n res = self.client().post('/actors', headers=self.\n casting_assistant_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_422_if_create_actor_fails(self):\n res = self.client().post('/actors', headers=self.\n executive_producer_auth_header, json=self.create_actor_fail)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_update_actors_success_producer(self):\n res = self.client().patch('/actors/1', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_success_director(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_401_failure_assistant(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_assistant_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_update_actors_404_failure(self):\n res = self.client().patch('/actors/100', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = 
json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_delete_actors_success_producer(self):\n res = self.client().delete('/actors/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_success_director(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/actors/100', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_movies(self):\n res = self.client().get('/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movies']))\n\n def test_create_movies_401_failure_director(self):\n res = self.client().post('/movies', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], 
False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_create_movies_success_producer(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n\n def test_create_movies_401_failure_assistant(self):\n res = self.client().post('/movies', headers=self.\n casting_assistant_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_422_create_movie_fails_incomplete_info(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_1)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_422_create_movie_fails_no_actor_input_info(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_2)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_404_create_movie_fails_wrong_actor_id(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_3)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n <mask token>\n\n def test_update_movies_success_director(self):\n res = self.client().patch('/movies/1', headers=self.\n casting_director_auth_header, 
json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n <mask token>\n\n def test_update_movies_404_failure(self):\n res = self.client().patch('/movies/100', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_delete_movies_success_producer(self):\n res = self.client().delete('/movies/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['movie_id'], 1)\n\n def test_delete_movies_401_failure_director(self):\n res = self.client().delete('/movies/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/movies/1', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/movies/100', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_actors_by_movies(self):\n res = self.client().get('/movies/1/actors', headers=self.\n casting_assistant_auth_header)\n res_data = 
json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actors']))\n\n def test_404_get_actors_by_movies(self):\n res = self.client().get('/movies/100/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n <mask token>\n\n def test_404_get_movies_by_actors(self):\n res = self.client().get('/actors/100/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CastingAgencyTestCase(unittest.TestCase):\n <mask token>\n\n def setUp(self):\n \"\"\"Define test variables and initialize app.\"\"\"\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = os.environ.get('TEST_DATABASE_NAME',\n 'abc123abc1234')\n self.database_path = 'postgres://postgres:postgres@{}/{}'.format(\n 'localhost:5432', self.database_name)\n setup_db(self.app, self.database_path)\n setup_db_for_test()\n self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +\n CASTING_ASSISTANT_TOKEN}\n self.casting_director_auth_header = {'Authorization': 'Bearer ' +\n CASTING_DIRECTOR_TOKEN}\n self.executive_producer_auth_header = {'Authorization': 'Bearer ' +\n EXECUTIVE_PRODUCER_TOKEN}\n self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,\n 'gender': 'Male'}\n self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}\n self.create_movie_success = {'title': 'Captain America: Civil War',\n 'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}\n self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}\n self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': []}\n self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': [100]}\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n self.db.create_all()\n\n def test_get_actors(self):\n res = self.client().get('/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actors']))\n <mask token>\n\n def test_create_actors_success_producer(self):\n res = self.client().post('/actors', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n 
self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_create_actors_401_failure_assistant(self):\n res = self.client().post('/actors', headers=self.\n casting_assistant_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_422_if_create_actor_fails(self):\n res = self.client().post('/actors', headers=self.\n executive_producer_auth_header, json=self.create_actor_fail)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_update_actors_success_producer(self):\n res = self.client().patch('/actors/1', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_success_director(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_401_failure_assistant(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_assistant_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_update_actors_404_failure(self):\n res = self.client().patch('/actors/100', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = 
json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_delete_actors_success_producer(self):\n res = self.client().delete('/actors/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_success_director(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/actors/100', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_movies(self):\n res = self.client().get('/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movies']))\n\n def test_create_movies_401_failure_director(self):\n res = self.client().post('/movies', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], 
False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_create_movies_success_producer(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n\n def test_create_movies_401_failure_assistant(self):\n res = self.client().post('/movies', headers=self.\n casting_assistant_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_422_create_movie_fails_incomplete_info(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_1)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_422_create_movie_fails_no_actor_input_info(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_2)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_404_create_movie_fails_wrong_actor_id(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_3)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n <mask token>\n\n def test_update_movies_success_director(self):\n res = self.client().patch('/movies/1', headers=self.\n casting_director_auth_header, 
json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n <mask token>\n\n def test_update_movies_404_failure(self):\n res = self.client().patch('/movies/100', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_delete_movies_success_producer(self):\n res = self.client().delete('/movies/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['movie_id'], 1)\n\n def test_delete_movies_401_failure_director(self):\n res = self.client().delete('/movies/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/movies/1', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/movies/100', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_actors_by_movies(self):\n res = self.client().get('/movies/1/actors', headers=self.\n casting_assistant_auth_header)\n res_data = 
json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actors']))\n\n def test_404_get_actors_by_movies(self):\n res = self.client().get('/movies/100/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_movies_by_actors(self):\n res = self.client().get('/actors/1/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movies']))\n\n def test_404_get_movies_by_actors(self):\n res = self.client().get('/actors/100/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass CastingAgencyTestCase(unittest.TestCase):\n \"\"\"This class has the test cases for casting agency web app endpoints\"\"\"\n\n def setUp(self):\n \"\"\"Define test variables and initialize app.\"\"\"\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = os.environ.get('TEST_DATABASE_NAME',\n 'abc123abc1234')\n self.database_path = 'postgres://postgres:postgres@{}/{}'.format(\n 'localhost:5432', self.database_name)\n setup_db(self.app, self.database_path)\n setup_db_for_test()\n self.casting_assistant_auth_header = {'Authorization': 'Bearer ' +\n CASTING_ASSISTANT_TOKEN}\n self.casting_director_auth_header = {'Authorization': 'Bearer ' +\n CASTING_DIRECTOR_TOKEN}\n self.executive_producer_auth_header = {'Authorization': 'Bearer ' +\n EXECUTIVE_PRODUCER_TOKEN}\n self.create_actor_success = {'name': 'Chris Hemsworth', 'age': 37,\n 'gender': 'Male'}\n self.create_actor_fail = {'name': 'Chris Evans', 'age': 39}\n self.create_movie_success = {'title': 'Captain America: Civil War',\n 'release_date': '12/04/2016', 'actors_ids': [1, 2, 3]}\n self.create_movie_fail_1 = {'title': 'Avenger: Infinity War'}\n self.create_movie_fail_2 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': []}\n self.create_movie_fail_3 = {'title': 'Avenger: Infinity War',\n 'release_date': '27/04/2018', 'actors_ids': [100]}\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n self.db.create_all()\n\n def test_get_actors(self):\n res = self.client().get('/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actors']))\n\n def test_create_actors_success_director(self):\n res = self.client().post('/actors', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n 
self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_create_actors_success_producer(self):\n res = self.client().post('/actors', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_create_actors_401_failure_assistant(self):\n res = self.client().post('/actors', headers=self.\n casting_assistant_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_422_if_create_actor_fails(self):\n res = self.client().post('/actors', headers=self.\n executive_producer_auth_header, json=self.create_actor_fail)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_update_actors_success_producer(self):\n res = self.client().patch('/actors/1', headers=self.\n executive_producer_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_success_director(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actor']))\n\n def test_update_actors_401_failure_assistant(self):\n res = self.client().patch('/actors/1', headers=self.\n casting_assistant_auth_header, 
json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_update_actors_404_failure(self):\n res = self.client().patch('/actors/100', headers=self.\n casting_director_auth_header, json=self.create_actor_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_delete_actors_success_producer(self):\n res = self.client().delete('/actors/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_success_director(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['actor_id'], 1)\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/actors/1', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/actors/100', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_movies(self):\n res = self.client().get('/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n 
self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movies']))\n\n def test_create_movies_401_failure_director(self):\n res = self.client().post('/movies', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_create_movies_success_producer(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n\n def test_create_movies_401_failure_assistant(self):\n res = self.client().post('/movies', headers=self.\n casting_assistant_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_422_create_movie_fails_incomplete_info(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_1)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_422_create_movie_fails_no_actor_input_info(self):\n res = self.client().post('/movies', headers=self.\n executive_producer_auth_header, json=self.create_movie_fail_2)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'unprocessable')\n\n def test_404_create_movie_fails_wrong_actor_id(self):\n res = self.client().post('/movies', headers=self.\n 
executive_producer_auth_header, json=self.create_movie_fail_3)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_update_movies_success_producer(self):\n res = self.client().patch('/movies/1', headers=self.\n executive_producer_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n\n def test_update_movies_success_director(self):\n res = self.client().patch('/movies/1', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movie']))\n\n def test_update_movies_401_failure_assistant(self):\n res = self.client().patch('/movies/1', headers=self.\n casting_assistant_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_update_movies_404_failure(self):\n res = self.client().patch('/movies/100', headers=self.\n casting_director_auth_header, json=self.create_movie_success)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_delete_movies_success_producer(self):\n res = self.client().delete('/movies/1', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertEqual(res_data['movie_id'], 1)\n\n def test_delete_movies_401_failure_director(self):\n res = 
self.client().delete('/movies/1', headers=self.\n casting_director_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete('/movies/1', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'Permission missing.')\n\n def test_delete_actors_404_failure(self):\n res = self.client().delete('/movies/100', headers=self.\n executive_producer_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_actors_by_movies(self):\n res = self.client().get('/movies/1/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['actors']))\n\n def test_404_get_actors_by_movies(self):\n res = self.client().get('/movies/100/actors', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n def test_get_movies_by_actors(self):\n res = self.client().get('/actors/1/movies', headers=self.\n casting_assistant_auth_header)\n res_data = json.loads(res.data)\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data['success'], True)\n self.assertTrue(len(res_data['movies']))\n\n def test_404_get_movies_by_actors(self):\n res = self.client().get('/actors/100/movies', headers=self.\n casting_assistant_auth_header)\n res_data = 
json.loads(res.data)\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data['success'], False)\n self.assertEqual(res_data['message'], 'resource not found')\n\n\n<mask token>\n",
"step-5": "import os\nimport unittest\nimport json\nfrom flask_sqlalchemy import SQLAlchemy\n\nfrom app import create_app\nfrom models import *\nfrom dotenv import load_dotenv, find_dotenv\n\nload_dotenv(find_dotenv())\n\n# auth tokens should be updated before running tests,\n# make sure update the tokens in setup.sh\n# read the README to know more details\nCASTING_ASSISTANT_TOKEN = os.environ.get(\n \"CASTING_ASSISTANT_TOKEN\",\n \"abc123abc1234\"\n )\n\nCASTING_DIRECTOR_TOKEN = os.environ.get(\n \"CASTING_DIRECTOR_TOKEN\",\n \"abc123abc1234\"\n )\n\nEXECUTIVE_PRODUCER_TOKEN = os.environ.get(\n \"EXECUTIVE_PRODUCER_TOKEN\",\n \"abc123abc1234\"\n )\n\n\nclass CastingAgencyTestCase(unittest.TestCase):\n \"\"\"This class has the test cases for casting agency web app endpoints\"\"\"\n\n def setUp(self):\n \"\"\"Define test variables and initialize app.\"\"\"\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = os.environ.get(\n \"TEST_DATABASE_NAME\",\n \"abc123abc1234\"\n )\n self.database_path = \"postgres://postgres:postgres@{}/{}\".format(\n \"localhost:5432\", self.database_name\n )\n setup_db(self.app, self.database_path)\n\n # drop db, create and populate with test data\n setup_db_for_test()\n\n self.casting_assistant_auth_header = {\n \"Authorization\": \"Bearer \" + CASTING_ASSISTANT_TOKEN\n }\n\n self.casting_director_auth_header = {\n \"Authorization\": \"Bearer \" + CASTING_DIRECTOR_TOKEN\n }\n\n self.executive_producer_auth_header = {\n \"Authorization\": \"Bearer \" + EXECUTIVE_PRODUCER_TOKEN\n }\n\n self.create_actor_success = {\n \"name\": \"Chris Hemsworth\",\n \"age\": 37,\n \"gender\": \"Male\",\n }\n\n self.create_actor_fail = {\n \"name\": \"Chris Evans\",\n \"age\": 39,\n }\n\n self.create_movie_success = {\n \"title\": \"Captain America: Civil War\",\n \"release_date\": \"12/04/2016\",\n \"actors_ids\": [1, 2, 3],\n }\n\n self.create_movie_fail_1 = {\n \"title\": \"Avenger: Infinity War\",\n }\n\n 
self.create_movie_fail_2 = {\n \"title\": \"Avenger: Infinity War\",\n \"release_date\": \"27/04/2018\",\n \"actors_ids\": [],\n }\n\n self.create_movie_fail_3 = {\n \"title\": \"Avenger: Infinity War\",\n \"release_date\": \"27/04/2018\",\n \"actors_ids\": [100],\n }\n\n # binds the app to the current context\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n # create all tables\n self.db.create_all()\n\n # test get actors endpoint\n def test_get_actors(self):\n res = self.client().get(\n \"/actors\",\n headers=self.casting_assistant_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"actors\"]))\n\n # test create actor endpoint with casting director auth token\n def test_create_actors_success_director(self):\n res = self.client().post(\n \"/actors\",\n headers=self.casting_director_auth_header,\n json=self.create_actor_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"actor\"]))\n\n # test create actor endpoint with executive producer auth token\n def test_create_actors_success_producer(self):\n res = self.client().post(\n \"/actors\",\n headers=self.executive_producer_auth_header,\n json=self.create_actor_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"actor\"]))\n\n # create actor fails due authentication failure with casting\n # assistant auth token\n def test_create_actors_401_failure_assistant(self):\n res = self.client().post(\n \"/actors\",\n headers=self.casting_assistant_auth_header,\n json=self.create_actor_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data[\"success\"], False)\n 
self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # create actor fails due to incomplete input\n def test_422_if_create_actor_fails(self):\n res = self.client().post(\n \"/actors\",\n headers=self.executive_producer_auth_header,\n json=self.create_actor_fail,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"unprocessable\")\n\n # test update actors with executive producer auth token\n def test_update_actors_success_producer(self):\n res = self.client().patch(\n \"/actors/1\",\n headers=self.executive_producer_auth_header,\n json=self.create_actor_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"actor\"]))\n\n # test update actors with casting director auth token\n def test_update_actors_success_director(self):\n res = self.client().patch(\n \"/actors/1\",\n headers=self.casting_director_auth_header,\n json=self.create_actor_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"actor\"]))\n\n # update actor fails due authentication failure\n # with casting assitant auth token\n def test_update_actors_401_failure_assistant(self):\n res = self.client().patch(\n \"/actors/1\",\n headers=self.casting_assistant_auth_header,\n json=self.create_actor_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # test update actor faiure if actor with id doesnot\n # exists in database\n def test_update_actors_404_failure(self):\n res = self.client().patch(\n \"/actors/100\",\n headers=self.casting_director_auth_header,\n 
json=self.create_actor_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"resource not found\")\n\n # test successfull delete actor with executive producer auth token\n def test_delete_actors_success_producer(self):\n res = self.client().delete(\n \"/actors/1\", headers=self.executive_producer_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertEqual(res_data[\"actor_id\"], 1)\n\n # test successfull delete actor with casting director auth token\n def test_delete_actors_success_director(self):\n res = self.client().delete(\n \"/actors/1\", headers=self.casting_director_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertEqual(res_data[\"actor_id\"], 1)\n\n # delete actor fails due authentication failure\n # with casting director auth token\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete(\n \"/actors/1\", headers=self.casting_assistant_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # delete actor failure if actor with input\n # id doesnot exits\n def test_delete_actors_404_failure(self):\n res = self.client().delete(\n \"/actors/100\", headers=self.casting_director_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"resource not found\")\n\n # test get movie endpoint\n def test_get_movies(self):\n res = self.client().get(\n \"/movies\",\n headers=self.casting_assistant_auth_header\n )\n res_data = 
json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"movies\"]))\n\n # test create movie authentication failure\n # with casting director auth token\n def test_create_movies_401_failure_director(self):\n res = self.client().post(\n \"/movies\",\n headers=self.casting_director_auth_header,\n json=self.create_movie_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # test create movies success with executive producer\n # auth token\n def test_create_movies_success_producer(self):\n res = self.client().post(\n \"/movies\",\n headers=self.executive_producer_auth_header,\n json=self.create_movie_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"movie\"]))\n\n # create actor fails due authentication failure\n # with casting assistant auth token\n def test_create_movies_401_failure_assistant(self):\n res = self.client().post(\n \"/movies\",\n headers=self.casting_assistant_auth_header,\n json=self.create_movie_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # create actor fails due to incomplete input\n def test_422_create_movie_fails_incomplete_info(self):\n res = self.client().post(\n \"/movies\",\n headers=self.executive_producer_auth_header,\n json=self.create_movie_fail_1,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"unprocessable\")\n\n # create movie fails due to incomplete input, no input actor ids\n def 
test_422_create_movie_fails_no_actor_input_info(self):\n res = self.client().post(\n \"/movies\",\n headers=self.executive_producer_auth_header,\n json=self.create_movie_fail_2,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 422)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"unprocessable\")\n\n # create movie fails due to wrong actor id input\n def test_404_create_movie_fails_wrong_actor_id(self):\n res = self.client().post(\n \"/movies\",\n headers=self.executive_producer_auth_header,\n json=self.create_movie_fail_3,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"resource not found\")\n\n # test update movie success with executive producer\n # auth token\n def test_update_movies_success_producer(self):\n res = self.client().patch(\n \"/movies/1\",\n headers=self.executive_producer_auth_header,\n json=self.create_movie_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"movie\"]))\n\n # test update movies success with casting\n # director auth token\n def test_update_movies_success_director(self):\n res = self.client().patch(\n \"/movies/1\",\n headers=self.casting_director_auth_header,\n json=self.create_movie_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"movie\"]))\n\n # update actor fails due authentication failure\n # with casting assitant auth token\n def test_update_movies_401_failure_assistant(self):\n res = self.client().patch(\n \"/movies/1\",\n headers=self.casting_assistant_auth_header,\n json=self.create_movie_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n 
self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # test update movies failure if movie with\n # input id does not exists\n def test_update_movies_404_failure(self):\n res = self.client().patch(\n \"/movies/100\",\n headers=self.casting_director_auth_header,\n json=self.create_movie_success,\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"resource not found\")\n\n # test delete movies success with executive producer\n # auth token\n def test_delete_movies_success_producer(self):\n res = self.client().delete(\n \"/movies/1\", headers=self.executive_producer_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertEqual(res_data[\"movie_id\"], 1)\n\n # test delete movies failure with casting director\n # auth token\n def test_delete_movies_401_failure_director(self):\n res = self.client().delete(\n \"/movies/1\", headers=self.casting_director_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # test delete actor fails due authentication failure\n # with casting assitant auth token\n def test_delete_actors_401_failure_assistant(self):\n res = self.client().delete(\n \"/movies/1\", headers=self.casting_assistant_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 401)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"Permission missing.\")\n\n # test delete actor failure if actor with input id\n # doesnot exists\n def test_delete_actors_404_failure(self):\n res = self.client().delete(\n \"/movies/100\", 
headers=self.executive_producer_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"resource not found\")\n\n # test get actor by movies success\n def test_get_actors_by_movies(self):\n res = self.client().get(\n \"/movies/1/actors\", headers=self.casting_assistant_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"actors\"]))\n\n # test get actor by movies failure if movie\n # with input id does not exits\n def test_404_get_actors_by_movies(self):\n res = self.client().get(\n \"/movies/100/actors\", headers=self.casting_assistant_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"resource not found\")\n\n # test get movies by actor success\n def test_get_movies_by_actors(self):\n res = self.client().get(\n \"/actors/1/movies\", headers=self.casting_assistant_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertEqual(res_data[\"success\"], True)\n self.assertTrue(len(res_data[\"movies\"]))\n\n # test get movies by actor failure if actor\n # with input id does not exists\n def test_404_get_movies_by_actors(self):\n res = self.client().get(\n \"/actors/100/movies\", headers=self.casting_assistant_auth_header\n )\n res_data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 404)\n self.assertEqual(res_data[\"success\"], False)\n self.assertEqual(res_data[\"message\"], \"resource not found\")\n\n\n# Make the tests conveniently executable\nif __name__ == \"__main__\":\n unittest.main()\n",
"step-ids": [
21,
30,
31,
35,
39
]
}
|
[
21,
30,
31,
35,
39
] |
<|reserved_special_token_0|>
def home_view(request):
if request.user.is_authenticated:
return HttpResponseRedirect('afterlogin')
return render(request, 'library/index.html')
<|reserved_special_token_0|>
def studentsignup_view(request):
form1 = forms.StudentUserForm()
form2 = forms.StudentExtraForm()
mydict = {'form1': form1, 'form2': form2}
if request.method == 'POST':
form1 = forms.StudentUserForm(request.POST)
form2 = forms.StudentExtraForm(request.POST)
if form1.is_valid() and form2.is_valid():
user = form1.save()
user.set_password(user.password)
user.save()
f2 = form2.save(commit=False)
f2.user = user
user2 = f2.save()
my_student_group = Group.objects.get_or_create(name='STUDENT')
my_student_group[0].user_set.add(user)
return HttpResponseRedirect('studentlogin')
return render(request, 'library/student_signup.html', context=mydict)
<|reserved_special_token_0|>
def afterlogin_view(request):
if is_admin(request.user):
return render(request, 'library/admin_afterlogin.html')
else:
return render(request, 'library/student_afterlogin.html')
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def addbook_view(request):
form = forms.BookForm()
if request.method == 'POST':
form = forms.BookForm(request.POST)
if form.is_valid():
user = form.save()
return render(request, 'library/book_added.html')
return render(request, 'library/add_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewbook_view(request):
books = models.Book.objects.all()
return render(request, 'library/view_book.html', {'books': books})
<|reserved_special_token_0|>
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewstudent_view(request):
students = models.StudentExtra.objects.all()
return render(request, 'library/view_student.html', {'students': students})
<|reserved_special_token_0|>
def aboutus_view(request):
return render(request, 'library/about_us.html')
<|reserved_special_token_0|>
@login_required(login_url='studentlogin')
def viewbook_view(request):
books = models.Book.objects.all()
return render(request, 'library/view_book.html', {'books': books})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def home_view(request):
if request.user.is_authenticated:
return HttpResponseRedirect('afterlogin')
return render(request, 'library/index.html')
<|reserved_special_token_0|>
def studentsignup_view(request):
form1 = forms.StudentUserForm()
form2 = forms.StudentExtraForm()
mydict = {'form1': form1, 'form2': form2}
if request.method == 'POST':
form1 = forms.StudentUserForm(request.POST)
form2 = forms.StudentExtraForm(request.POST)
if form1.is_valid() and form2.is_valid():
user = form1.save()
user.set_password(user.password)
user.save()
f2 = form2.save(commit=False)
f2.user = user
user2 = f2.save()
my_student_group = Group.objects.get_or_create(name='STUDENT')
my_student_group[0].user_set.add(user)
return HttpResponseRedirect('studentlogin')
return render(request, 'library/student_signup.html', context=mydict)
def is_admin(user):
return user.groups.filter(name='ADMIN').exists()
def afterlogin_view(request):
if is_admin(request.user):
return render(request, 'library/admin_afterlogin.html')
else:
return render(request, 'library/student_afterlogin.html')
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def addbook_view(request):
form = forms.BookForm()
if request.method == 'POST':
form = forms.BookForm(request.POST)
if form.is_valid():
user = form.save()
return render(request, 'library/book_added.html')
return render(request, 'library/add_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewbook_view(request):
books = models.Book.objects.all()
return render(request, 'library/view_book.html', {'books': books})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def issuebook_view(request):
form = forms.IssuedBookForm()
if request.method == 'POST':
form = forms.IssuedBookForm(request.POST)
if form.is_valid():
obj = models.IssuedBook()
obj.enrollment = request.POST.get('enrollment2')
obj.isbn = request.POST.get('isbn2')
obj.save()
return render(request, 'library/book_issued.html')
return render(request, 'library/issue_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewissuedbook_view(request):
issuedbooks = models.IssuedBook.objects.all()
li = []
for lb in issuedbooks:
issdate = str(lb.issuedate.day) + '-' + str(lb.issuedate.month
) + '-' + str(lb.issuedate.year)
expdate = str(lb.expirydate.day) + '-' + str(lb.expirydate.month
) + '-' + str(lb.expirydate.year)
days = date.today() - lb.issuedate
print(date.today())
d = days.days
fine = 0
if d > 20:
day = d - 20
fine = day * 10
books = list(models.Book.objects.filter(isbn=lb.isbn))
students = list(models.StudentExtra.objects.filter(enrollment=lb.
enrollment))
i = 0
for l in books:
t = students[i].get_name, students[i].enrollment, books[i
].name, books[i].author, issdate, expdate, fine
i = i + 1
li.append(t)
return render(request, 'library/view_issued_book.html', {'li': li})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewstudent_view(request):
students = models.StudentExtra.objects.all()
return render(request, 'library/view_student.html', {'students': students})
<|reserved_special_token_0|>
def aboutus_view(request):
return render(request, 'library/about_us.html')
<|reserved_special_token_0|>
@login_required(login_url='studentlogin')
def viewbook_view(request):
books = models.Book.objects.all()
return render(request, 'library/view_book.html', {'books': books})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def home_view(request):
if request.user.is_authenticated:
return HttpResponseRedirect('afterlogin')
return render(request, 'library/index.html')
<|reserved_special_token_0|>
def adminclick_view(request):
if request.user.is_authenticated:
return HttpResponseRedirect('afterlogin')
return render(request, 'library/admin_click.html')
<|reserved_special_token_0|>
def studentsignup_view(request):
    """Create a student User plus StudentExtra profile; adds to STUDENT group."""
    form1 = forms.StudentUserForm()
    form2 = forms.StudentExtraForm()
    mydict = {'form1': form1, 'form2': form2}
    if request.method == 'POST':
        form1 = forms.StudentUserForm(request.POST)
        form2 = forms.StudentExtraForm(request.POST)
        if form1.is_valid() and form2.is_valid():
            user = form1.save()
            # Hash the raw password stored by the form before reuse.
            user.set_password(user.password)
            user.save()
            f2 = form2.save(commit=False)
            f2.user = user
            user2 = f2.save()  # NOTE(review): Model.save() returns None; user2 is unused
            my_student_group = Group.objects.get_or_create(name='STUDENT')
            my_student_group[0].user_set.add(user)
            return HttpResponseRedirect('studentlogin')
    return render(request, 'library/student_signup.html', context=mydict)
def is_admin(user):
    """Return True when *user* belongs to the 'ADMIN' auth group."""
    return user.groups.filter(name='ADMIN').exists()
def afterlogin_view(request):
    """Dispatch to the admin or student dashboard by group membership."""
    if is_admin(request.user):
        return render(request, 'library/admin_afterlogin.html')
    else:
        return render(request, 'library/student_afterlogin.html')
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def addbook_view(request):
    """Admin-only: render the add-book form and save a Book on valid POST."""
    form = forms.BookForm()
    if request.method == 'POST':
        form = forms.BookForm(request.POST)
        if form.is_valid():
            user = form.save()  # NOTE(review): 'user' is actually the saved Book; unused
            return render(request, 'library/book_added.html')
    return render(request, 'library/add_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewbook_view(request):
    """Admin-only book listing (a later redefinition rebinds this name)."""
    books = models.Book.objects.all()
    return render(request, 'library/view_book.html', {'books': books})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def issuebook_view(request):
    """Admin-only: record an IssuedBook from the posted enrollment/ISBN."""
    form = forms.IssuedBookForm()
    if request.method == 'POST':
        form = forms.IssuedBookForm(request.POST)
        if form.is_valid():
            obj = models.IssuedBook()
            # Reads raw POST keys rather than form.cleaned_data -- presumably
            # the form widgets are named enrollment2/isbn2; TODO confirm.
            obj.enrollment = request.POST.get('enrollment2')
            obj.isbn = request.POST.get('isbn2')
            obj.save()
            return render(request, 'library/book_issued.html')
    return render(request, 'library/issue_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewissuedbook_view(request):
    """Admin-only report of all issued books with per-record fine computation."""
    issuedbooks = models.IssuedBook.objects.all()
    li = []
    for lb in issuedbooks:
        issdate = str(lb.issuedate.day) + '-' + str(lb.issuedate.month
            ) + '-' + str(lb.issuedate.year)
        expdate = str(lb.expirydate.day) + '-' + str(lb.expirydate.month
            ) + '-' + str(lb.expirydate.year)
        # Fine: 10 per day beyond a 20-day loan period.
        days = date.today() - lb.issuedate
        print(date.today())
        d = days.days
        fine = 0
        if d > 20:
            day = d - 20
            fine = day * 10
        books = list(models.Book.objects.filter(isbn=lb.isbn))
        students = list(models.StudentExtra.objects.filter(enrollment=lb.
            enrollment))
        i = 0
        # NOTE(review): indexes students[i] while iterating books; raises
        # IndexError whenever the two querysets differ in length.
        for l in books:
            t = students[i].get_name, students[i].enrollment, books[i
                ].name, books[i].author, issdate, expdate, fine
            i = i + 1
            li.append(t)
    return render(request, 'library/view_issued_book.html', {'li': li})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewstudent_view(request):
    """Admin-only listing of all registered student profiles."""
    students = models.StudentExtra.objects.all()
    return render(request, 'library/view_student.html', {'students': students})
@login_required(login_url='studentlogin')
def viewissuedbookbystudent(request):
    """Show the logged-in student's issued books plus dates and fines."""
    # NOTE(review): student[0] raises IndexError when the logged-in user has
    # no StudentExtra profile -- TODO confirm signup always creates one.
    student = models.StudentExtra.objects.filter(user_id=request.user.id)
    issuedbook = models.IssuedBook.objects.filter(enrollment=student[0].
        enrollment)
    li1 = []
    li2 = []
    for ib in issuedbook:
        books = models.Book.objects.filter(isbn=ib.isbn)
        for book in books:
            t = request.user, student[0], book.name, book.author
            li1.append(t)
        issdate = str(ib.issuedate.day) + '-' + str(ib.issuedate.month
            ) + '-' + str(ib.issuedate.year)
        expdate = str(ib.expirydate.day) + '-' + str(ib.expirydate.month
            ) + '-' + str(ib.expirydate.year)
        days = date.today() - ib.issuedate
        print(date.today())
        d = days.days
        fine = 0
        if d > 20:
            day = d - 20
            fine = day * 10
        # li1 grows once per matching book but li2 once per issued record;
        # presumably the template pairs them positionally -- TODO confirm.
        t = issdate, expdate, fine
        li2.append(t)
    return render(request, 'library/view_issued_book_bystudent.html', {
        'li1': li1, 'li2': li2})
def aboutus_view(request):
    """Static 'about us' page."""
    return render(request, 'library/about_us.html')
def contactus_view(request):
    """Contact form: email the site owner on valid POST, then show success page."""
    sub = forms.ContactusForm()
    if request.method == 'POST':
        sub = forms.ContactusForm(request.POST)
        if sub.is_valid():
            email = sub.cleaned_data['Email']
            name = sub.cleaned_data['Name']
            message = sub.cleaned_data['Message']
            send_mail(str(name) + ' || ' + str(email), message,
                EMAIL_HOST_USER, ['[email protected]'], fail_silently
                =False)
            return render(request, 'library/contact_us_success.html')
    return render(request, 'library/contact_us.html', {'form': sub})
@login_required(login_url='studentlogin')
def viewbook_view(request):
    """Student listing of all books (rebinds the earlier admin version)."""
    books = models.Book.objects.all()
    return render(request, 'library/view_book.html', {'books': books})
def blog_view(request):
    """Static blog page."""
    return render(request, 'library/blogs.html')
<|reserved_special_token_1|>
from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from . import forms, models
from django.contrib.auth.models import Group
from django.contrib import auth
from django.contrib.auth.decorators import login_required, user_passes_test
from datetime import datetime, timedelta, date
from django.core.mail import send_mail
from librarymanagement.settings import EMAIL_HOST_USER
from django.contrib import messages
def home_view(request):
    """Landing page; users who are already logged in go to their dashboard."""
    if request.user.is_authenticated:
        return HttpResponseRedirect('afterlogin')
    return render(request, 'library/index.html')
def studentclick_view(request):
    """Student landing page with signup/login options; skipped when logged in."""
    if request.user.is_authenticated:
        return HttpResponseRedirect('afterlogin')
    return render(request, 'library/student_click.html')
def adminclick_view(request):
    """Admin landing page with signup/login options; skipped when logged in."""
    if request.user.is_authenticated:
        return HttpResponseRedirect('afterlogin')
    return render(request, 'library/admin_click.html')
def adminsignup_view(request):
    """Create an admin account and add it to the ADMIN auth group."""
    form = forms.AdminSigupForm()
    if request.method == 'POST':
        form = forms.AdminSigupForm(request.POST)
        if form.is_valid():
            user = form.save()
            # Hash the raw password stored by the form before reuse.
            user.set_password(user.password)
            user.save()
            my_admin_group = Group.objects.get_or_create(name='ADMIN')
            my_admin_group[0].user_set.add(user)
            return HttpResponseRedirect('adminlogin')
    return render(request, 'library/admin_signup.html', {'form': form})
def studentsignup_view(request):
    """Create a student User plus StudentExtra profile; adds to STUDENT group."""
    form1 = forms.StudentUserForm()
    form2 = forms.StudentExtraForm()
    mydict = {'form1': form1, 'form2': form2}
    if request.method == 'POST':
        form1 = forms.StudentUserForm(request.POST)
        form2 = forms.StudentExtraForm(request.POST)
        if form1.is_valid() and form2.is_valid():
            user = form1.save()
            # Hash the raw password stored by the form before reuse.
            user.set_password(user.password)
            user.save()
            f2 = form2.save(commit=False)
            f2.user = user
            user2 = f2.save()  # NOTE(review): Model.save() returns None; user2 is unused
            my_student_group = Group.objects.get_or_create(name='STUDENT')
            my_student_group[0].user_set.add(user)
            return HttpResponseRedirect('studentlogin')
    return render(request, 'library/student_signup.html', context=mydict)
def is_admin(user):
    """Return True when *user* belongs to the 'ADMIN' auth group."""
    return user.groups.filter(name='ADMIN').exists()
def afterlogin_view(request):
    """Dispatch to the admin or student dashboard by group membership."""
    if is_admin(request.user):
        return render(request, 'library/admin_afterlogin.html')
    else:
        return render(request, 'library/student_afterlogin.html')
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def addbook_view(request):
    """Admin-only: render the add-book form and save a Book on valid POST."""
    form = forms.BookForm()
    if request.method == 'POST':
        form = forms.BookForm(request.POST)
        if form.is_valid():
            user = form.save()  # NOTE(review): 'user' is actually the saved Book; unused
            return render(request, 'library/book_added.html')
    return render(request, 'library/add_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewbook_view(request):
    """Admin-only book listing (a later redefinition rebinds this name)."""
    books = models.Book.objects.all()
    return render(request, 'library/view_book.html', {'books': books})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def issuebook_view(request):
    """Admin-only: record an IssuedBook from the posted enrollment/ISBN."""
    form = forms.IssuedBookForm()
    if request.method == 'POST':
        form = forms.IssuedBookForm(request.POST)
        if form.is_valid():
            obj = models.IssuedBook()
            # Reads raw POST keys rather than form.cleaned_data -- presumably
            # the form widgets are named enrollment2/isbn2; TODO confirm.
            obj.enrollment = request.POST.get('enrollment2')
            obj.isbn = request.POST.get('isbn2')
            obj.save()
            return render(request, 'library/book_issued.html')
    return render(request, 'library/issue_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewissuedbook_view(request):
    """Admin-only report of all issued books with per-record fine computation."""
    issuedbooks = models.IssuedBook.objects.all()
    li = []
    for lb in issuedbooks:
        issdate = str(lb.issuedate.day) + '-' + str(lb.issuedate.month
            ) + '-' + str(lb.issuedate.year)
        expdate = str(lb.expirydate.day) + '-' + str(lb.expirydate.month
            ) + '-' + str(lb.expirydate.year)
        # Fine: 10 per day beyond a 20-day loan period.
        days = date.today() - lb.issuedate
        print(date.today())
        d = days.days
        fine = 0
        if d > 20:
            day = d - 20
            fine = day * 10
        books = list(models.Book.objects.filter(isbn=lb.isbn))
        students = list(models.StudentExtra.objects.filter(enrollment=lb.
            enrollment))
        i = 0
        # NOTE(review): indexes students[i] while iterating books; raises
        # IndexError whenever the two querysets differ in length.
        for l in books:
            t = students[i].get_name, students[i].enrollment, books[i
                ].name, books[i].author, issdate, expdate, fine
            i = i + 1
            li.append(t)
    return render(request, 'library/view_issued_book.html', {'li': li})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewstudent_view(request):
    """Admin-only listing of all registered student profiles."""
    students = models.StudentExtra.objects.all()
    return render(request, 'library/view_student.html', {'students': students})
@login_required(login_url='studentlogin')
def viewissuedbookbystudent(request):
    """Show the logged-in student's issued books plus dates and fines."""
    # NOTE(review): student[0] raises IndexError when the logged-in user has
    # no StudentExtra profile -- TODO confirm signup always creates one.
    student = models.StudentExtra.objects.filter(user_id=request.user.id)
    issuedbook = models.IssuedBook.objects.filter(enrollment=student[0].
        enrollment)
    li1 = []
    li2 = []
    for ib in issuedbook:
        books = models.Book.objects.filter(isbn=ib.isbn)
        for book in books:
            t = request.user, student[0], book.name, book.author
            li1.append(t)
        issdate = str(ib.issuedate.day) + '-' + str(ib.issuedate.month
            ) + '-' + str(ib.issuedate.year)
        expdate = str(ib.expirydate.day) + '-' + str(ib.expirydate.month
            ) + '-' + str(ib.expirydate.year)
        days = date.today() - ib.issuedate
        print(date.today())
        d = days.days
        fine = 0
        if d > 20:
            day = d - 20
            fine = day * 10
        # li1 grows once per matching book but li2 once per issued record;
        # presumably the template pairs them positionally -- TODO confirm.
        t = issdate, expdate, fine
        li2.append(t)
    return render(request, 'library/view_issued_book_bystudent.html', {
        'li1': li1, 'li2': li2})
def aboutus_view(request):
    """Static 'about us' page."""
    return render(request, 'library/about_us.html')
def contactus_view(request):
    """Contact form: email the site owner on valid POST, then show success page."""
    sub = forms.ContactusForm()
    if request.method == 'POST':
        sub = forms.ContactusForm(request.POST)
        if sub.is_valid():
            email = sub.cleaned_data['Email']
            name = sub.cleaned_data['Name']
            message = sub.cleaned_data['Message']
            send_mail(str(name) + ' || ' + str(email), message,
                EMAIL_HOST_USER, ['[email protected]'], fail_silently
                =False)
            return render(request, 'library/contact_us_success.html')
    return render(request, 'library/contact_us.html', {'form': sub})
@login_required(login_url='studentlogin')
def viewbook_view(request):
    """Student listing of all books (rebinds the earlier admin version)."""
    books = models.Book.objects.all()
    return render(request, 'library/view_book.html', {'books': books})
def blog_view(request):
    """Static blog page."""
    return render(request, 'library/blogs.html')
<|reserved_special_token_1|>
from django.shortcuts import render,redirect
from django.http import HttpResponseRedirect
from . import forms,models
from django.contrib.auth.models import Group
from django.contrib import auth
from django.contrib.auth.decorators import login_required,user_passes_test
from datetime import datetime,timedelta,date
from django.core.mail import send_mail
from librarymanagement.settings import EMAIL_HOST_USER
from django.contrib import messages
#from django.contib.auth.models import user, auth
def home_view(request):
    """Landing page; authenticated users are bounced to their dashboard."""
    if not request.user.is_authenticated:
        return render(request, 'library/index.html')
    return HttpResponseRedirect('afterlogin')
# for showing blog content to users
#for showing signup/login button for student
def studentclick_view(request):
    """Student entry page with signup/login choices; skipped when logged in."""
    if not request.user.is_authenticated:
        return render(request, 'library/student_click.html')
    return HttpResponseRedirect('afterlogin')
#for showing signup/login button for teacher
def adminclick_view(request):
    """Admin entry page with signup/login choices; skipped when logged in."""
    if not request.user.is_authenticated:
        return render(request, 'library/admin_click.html')
    return HttpResponseRedirect('afterlogin')
def adminsignup_view(request):
    """Register a new admin account and add it to the ADMIN auth group."""
    if request.method == 'POST':
        form = forms.AdminSigupForm(request.POST)
        if form.is_valid():
            new_user = form.save()
            # The form saved the raw password; hash it properly before reuse.
            new_user.set_password(new_user.password)
            new_user.save()
            admin_group = Group.objects.get_or_create(name='ADMIN')
            admin_group[0].user_set.add(new_user)
            return HttpResponseRedirect('adminlogin')
    else:
        form = forms.AdminSigupForm()
    return render(request, 'library/admin_signup.html', {'form': form})
def studentsignup_view(request):
    """Register a student: create the auth User, hash the password, attach
    the StudentExtra profile, and put the user in the STUDENT group.

    Renders the signup page with bound forms on validation failure.
    """
    form1 = forms.StudentUserForm()
    form2 = forms.StudentExtraForm()
    mydict = {'form1': form1, 'form2': form2}
    if request.method == 'POST':
        form1 = forms.StudentUserForm(request.POST)
        form2 = forms.StudentExtraForm(request.POST)
        if form1.is_valid() and form2.is_valid():
            user = form1.save()
            # The form stored the raw password; hash it properly before reuse.
            user.set_password(user.password)
            user.save()
            f2 = form2.save(commit=False)
            f2.user = user
            # fix: the result was bound to an unused 'user2' variable, but
            # Model.save() returns None -- the assignment was meaningless.
            f2.save()
            my_student_group = Group.objects.get_or_create(name='STUDENT')
            my_student_group[0].user_set.add(user)
            return HttpResponseRedirect('studentlogin')
    return render(request, 'library/student_signup.html', context=mydict)
def is_admin(user):
    """Return True when *user* belongs to the 'ADMIN' auth group."""
    admin_membership = user.groups.filter(name='ADMIN')
    return admin_membership.exists()
def afterlogin_view(request):
    """Route to the admin or student dashboard based on group membership."""
    template = ('library/admin_afterlogin.html' if is_admin(request.user)
                else 'library/student_afterlogin.html')
    return render(request, template)
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def addbook_view(request):
    """Admin-only: show the add-book form and save a Book on valid POST."""
    # Empty form for the initial GET render (also reused on invalid POST).
    form = forms.BookForm()
    if request.method == 'POST':
        # Re-bind the form with the submitted data.
        form = forms.BookForm(request.POST)
        if form.is_valid():
            # fix: the saved Book was bound to a misleading, unused local
            # named 'user'; discard the return value instead.
            form.save()
            return render(request, 'library/book_added.html')
    return render(request, 'library/add_book.html', {'form': form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewbook_view(request):
    """Admin-only listing of every Book.

    NOTE(review): a second `viewbook_view` defined later in this module
    rebinds the name at import time, so this admin-guarded version is
    shadowed; check the URLconf to see which guard is actually wired up.
    """
    books=models.Book.objects.all()
    return render(request,'library/view_book.html',{'books':books})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def issuebook_view(request):
    """Admin-only: record an IssuedBook for a student from the posted form."""
    form=forms.IssuedBookForm()
    if request.method=='POST':
        # Re-bind the form with the submitted data.
        form=forms.IssuedBookForm(request.POST)
        if form.is_valid():
            obj=models.IssuedBook()
            # Reads raw POST keys rather than form.cleaned_data -- presumably
            # the form widgets are named enrollment2/isbn2; TODO confirm.
            obj.enrollment=request.POST.get('enrollment2')
            obj.isbn=request.POST.get('isbn2')
            obj.save()
            return render(request,'library/book_issued.html')
    return render(request,'library/issue_book.html',{'form':form})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewissuedbook_view(request):
    """Admin-only report of all issued books.

    Builds one row per (student, book) pair for each IssuedBook record:
    (student name, enrollment, book name, author, issue date, expiry date,
    fine). A fine of 10 per day accrues after a 20-day loan period.
    """
    li = []
    for lb in models.IssuedBook.objects.all():
        issdate = str(lb.issuedate.day) + '-' + str(lb.issuedate.month) + '-' + str(lb.issuedate.year)
        expdate = str(lb.expirydate.day) + '-' + str(lb.expirydate.month) + '-' + str(lb.expirydate.year)
        # Fine: 10 per day beyond the 20-day loan period.
        overdue = (date.today() - lb.issuedate).days - 20
        fine = overdue * 10 if overdue > 0 else 0
        books = models.Book.objects.filter(isbn=lb.isbn)
        students = models.StudentExtra.objects.filter(enrollment=lb.enrollment)
        # fix: the original indexed students[i] while iterating books, which
        # raised IndexError whenever the two querysets had different lengths;
        # zip pairs them safely and yields identical rows when lengths match.
        # (Also removed a leftover debug print of today's date.)
        for student, book in zip(students, books):
            # get_name is read as an attribute -- presumably a property on
            # StudentExtra; kept as in the original.
            li.append((student.get_name, student.enrollment, book.name,
                       book.author, issdate, expdate, fine))
    return render(request, 'library/view_issued_book.html', {'li': li})
@login_required(login_url='adminlogin')
@user_passes_test(is_admin)
def viewstudent_view(request):
    """Admin-only listing of every registered student profile."""
    all_students = models.StudentExtra.objects.all()
    context = {'students': all_students}
    return render(request, 'library/view_student.html', context)
@login_required(login_url='studentlogin')
def viewissuedbookbystudent(request):
    """Show the logged-in student's issued books plus dates and fines.

    li1 holds (user, profile, book name, author) per matching book; li2
    holds (issue date, expiry date, fine) per issued record.
    """
    # NOTE(review): student[0] raises IndexError when the logged-in user has
    # no StudentExtra profile -- TODO confirm signup always creates one.
    student=models.StudentExtra.objects.filter(user_id=request.user.id)
    issuedbook=models.IssuedBook.objects.filter(enrollment=student[0].enrollment)
    li1=[]
    li2=[]
    for ib in issuedbook:
        books=models.Book.objects.filter(isbn=ib.isbn)
        for book in books:
            t=(request.user,student[0],book.name,book.author)
            li1.append(t)
        issdate=str(ib.issuedate.day)+'-'+str(ib.issuedate.month)+'-'+str(ib.issuedate.year)
        expdate=str(ib.expirydate.day)+'-'+str(ib.expirydate.month)+'-'+str(ib.expirydate.year)
        # Fine: 10 per day beyond a 20-day loan period.
        days=(date.today()-ib.issuedate)
        print(date.today())
        d=days.days
        fine=0
        if d>20:
            day=d-20
            fine=day*10
        # li1 grows once per matching book but li2 once per issued record;
        # presumably the template pairs them positionally -- TODO confirm.
        t=(issdate,expdate,fine)
        li2.append(t)
    return render(request,'library/view_issued_book_bystudent.html',{'li1':li1,'li2':li2})
def aboutus_view(request):
    """Static 'about us' page."""
    return render(request, 'library/about_us.html')
def contactus_view(request):
    """Contact form: on valid POST, email the site owner and show a success page."""
    sub = forms.ContactusForm()
    if request.method == 'POST':
        sub = forms.ContactusForm(request.POST)
        if sub.is_valid():
            sender_email = sub.cleaned_data['Email']
            sender_name = sub.cleaned_data['Name']
            body = sub.cleaned_data['Message']
            subject = str(sender_name) + ' || ' + str(sender_email)
            send_mail(subject, body, EMAIL_HOST_USER,
                      ['[email protected]'], fail_silently=False)
            return render(request, 'library/contact_us_success.html')
    return render(request, 'library/contact_us.html', {'form': sub})
@login_required(login_url='studentlogin')
def viewbook_view(request):
    """Student listing of every Book.

    NOTE(review): this rebinds the `viewbook_view` name defined earlier with
    admin decorators; at import time only this student-guarded version
    survives. Check the URLconf for the intended wiring.
    """
    books=models.Book.objects.all()
    return render(request,'library/view_book.html',{'books':books})
def blog_view(request):
    """Render the blogs listing page."""
    template_name = 'library/blogs.html'
    return render(request, template_name)
|
flexible
|
{
"blob_id": "ce9e1ac0f1596ba4db904289f91f5ab95c2de4b8",
"index": 7642,
"step-1": "<mask token>\n\n\ndef home_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request, 'library/index.html')\n\n\n<mask token>\n\n\ndef studentsignup_view(request):\n form1 = forms.StudentUserForm()\n form2 = forms.StudentExtraForm()\n mydict = {'form1': form1, 'form2': form2}\n if request.method == 'POST':\n form1 = forms.StudentUserForm(request.POST)\n form2 = forms.StudentExtraForm(request.POST)\n if form1.is_valid() and form2.is_valid():\n user = form1.save()\n user.set_password(user.password)\n user.save()\n f2 = form2.save(commit=False)\n f2.user = user\n user2 = f2.save()\n my_student_group = Group.objects.get_or_create(name='STUDENT')\n my_student_group[0].user_set.add(user)\n return HttpResponseRedirect('studentlogin')\n return render(request, 'library/student_signup.html', context=mydict)\n\n\n<mask token>\n\n\ndef afterlogin_view(request):\n if is_admin(request.user):\n return render(request, 'library/admin_afterlogin.html')\n else:\n return render(request, 'library/student_afterlogin.html')\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef addbook_view(request):\n form = forms.BookForm()\n if request.method == 'POST':\n form = forms.BookForm(request.POST)\n if form.is_valid():\n user = form.save()\n return render(request, 'library/book_added.html')\n return render(request, 'library/add_book.html', {'form': form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\n<mask token>\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewstudent_view(request):\n students = models.StudentExtra.objects.all()\n return render(request, 'library/view_student.html', {'students': students})\n\n\n<mask token>\n\n\ndef aboutus_view(request):\n return render(request, 
'library/about_us.html')\n\n\n<mask token>\n\n\n@login_required(login_url='studentlogin')\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef home_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request, 'library/index.html')\n\n\n<mask token>\n\n\ndef studentsignup_view(request):\n form1 = forms.StudentUserForm()\n form2 = forms.StudentExtraForm()\n mydict = {'form1': form1, 'form2': form2}\n if request.method == 'POST':\n form1 = forms.StudentUserForm(request.POST)\n form2 = forms.StudentExtraForm(request.POST)\n if form1.is_valid() and form2.is_valid():\n user = form1.save()\n user.set_password(user.password)\n user.save()\n f2 = form2.save(commit=False)\n f2.user = user\n user2 = f2.save()\n my_student_group = Group.objects.get_or_create(name='STUDENT')\n my_student_group[0].user_set.add(user)\n return HttpResponseRedirect('studentlogin')\n return render(request, 'library/student_signup.html', context=mydict)\n\n\ndef is_admin(user):\n return user.groups.filter(name='ADMIN').exists()\n\n\ndef afterlogin_view(request):\n if is_admin(request.user):\n return render(request, 'library/admin_afterlogin.html')\n else:\n return render(request, 'library/student_afterlogin.html')\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef addbook_view(request):\n form = forms.BookForm()\n if request.method == 'POST':\n form = forms.BookForm(request.POST)\n if form.is_valid():\n user = form.save()\n return render(request, 'library/book_added.html')\n return render(request, 'library/add_book.html', {'form': form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef issuebook_view(request):\n form = forms.IssuedBookForm()\n if request.method == 'POST':\n form = forms.IssuedBookForm(request.POST)\n if form.is_valid():\n obj = models.IssuedBook()\n obj.enrollment = 
request.POST.get('enrollment2')\n obj.isbn = request.POST.get('isbn2')\n obj.save()\n return render(request, 'library/book_issued.html')\n return render(request, 'library/issue_book.html', {'form': form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewissuedbook_view(request):\n issuedbooks = models.IssuedBook.objects.all()\n li = []\n for lb in issuedbooks:\n issdate = str(lb.issuedate.day) + '-' + str(lb.issuedate.month\n ) + '-' + str(lb.issuedate.year)\n expdate = str(lb.expirydate.day) + '-' + str(lb.expirydate.month\n ) + '-' + str(lb.expirydate.year)\n days = date.today() - lb.issuedate\n print(date.today())\n d = days.days\n fine = 0\n if d > 20:\n day = d - 20\n fine = day * 10\n books = list(models.Book.objects.filter(isbn=lb.isbn))\n students = list(models.StudentExtra.objects.filter(enrollment=lb.\n enrollment))\n i = 0\n for l in books:\n t = students[i].get_name, students[i].enrollment, books[i\n ].name, books[i].author, issdate, expdate, fine\n i = i + 1\n li.append(t)\n return render(request, 'library/view_issued_book.html', {'li': li})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewstudent_view(request):\n students = models.StudentExtra.objects.all()\n return render(request, 'library/view_student.html', {'students': students})\n\n\n<mask token>\n\n\ndef aboutus_view(request):\n return render(request, 'library/about_us.html')\n\n\n<mask token>\n\n\n@login_required(login_url='studentlogin')\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef home_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request, 'library/index.html')\n\n\n<mask token>\n\n\ndef adminclick_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request, 'library/admin_click.html')\n\n\n<mask token>\n\n\ndef studentsignup_view(request):\n form1 = forms.StudentUserForm()\n form2 = forms.StudentExtraForm()\n mydict = {'form1': form1, 'form2': form2}\n if request.method == 'POST':\n form1 = forms.StudentUserForm(request.POST)\n form2 = forms.StudentExtraForm(request.POST)\n if form1.is_valid() and form2.is_valid():\n user = form1.save()\n user.set_password(user.password)\n user.save()\n f2 = form2.save(commit=False)\n f2.user = user\n user2 = f2.save()\n my_student_group = Group.objects.get_or_create(name='STUDENT')\n my_student_group[0].user_set.add(user)\n return HttpResponseRedirect('studentlogin')\n return render(request, 'library/student_signup.html', context=mydict)\n\n\ndef is_admin(user):\n return user.groups.filter(name='ADMIN').exists()\n\n\ndef afterlogin_view(request):\n if is_admin(request.user):\n return render(request, 'library/admin_afterlogin.html')\n else:\n return render(request, 'library/student_afterlogin.html')\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef addbook_view(request):\n form = forms.BookForm()\n if request.method == 'POST':\n form = forms.BookForm(request.POST)\n if form.is_valid():\n user = form.save()\n return render(request, 'library/book_added.html')\n return render(request, 'library/add_book.html', {'form': form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef 
issuebook_view(request):\n form = forms.IssuedBookForm()\n if request.method == 'POST':\n form = forms.IssuedBookForm(request.POST)\n if form.is_valid():\n obj = models.IssuedBook()\n obj.enrollment = request.POST.get('enrollment2')\n obj.isbn = request.POST.get('isbn2')\n obj.save()\n return render(request, 'library/book_issued.html')\n return render(request, 'library/issue_book.html', {'form': form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewissuedbook_view(request):\n issuedbooks = models.IssuedBook.objects.all()\n li = []\n for lb in issuedbooks:\n issdate = str(lb.issuedate.day) + '-' + str(lb.issuedate.month\n ) + '-' + str(lb.issuedate.year)\n expdate = str(lb.expirydate.day) + '-' + str(lb.expirydate.month\n ) + '-' + str(lb.expirydate.year)\n days = date.today() - lb.issuedate\n print(date.today())\n d = days.days\n fine = 0\n if d > 20:\n day = d - 20\n fine = day * 10\n books = list(models.Book.objects.filter(isbn=lb.isbn))\n students = list(models.StudentExtra.objects.filter(enrollment=lb.\n enrollment))\n i = 0\n for l in books:\n t = students[i].get_name, students[i].enrollment, books[i\n ].name, books[i].author, issdate, expdate, fine\n i = i + 1\n li.append(t)\n return render(request, 'library/view_issued_book.html', {'li': li})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewstudent_view(request):\n students = models.StudentExtra.objects.all()\n return render(request, 'library/view_student.html', {'students': students})\n\n\n@login_required(login_url='studentlogin')\ndef viewissuedbookbystudent(request):\n student = models.StudentExtra.objects.filter(user_id=request.user.id)\n issuedbook = models.IssuedBook.objects.filter(enrollment=student[0].\n enrollment)\n li1 = []\n li2 = []\n for ib in issuedbook:\n books = models.Book.objects.filter(isbn=ib.isbn)\n for book in books:\n t = request.user, student[0], book.name, book.author\n li1.append(t)\n issdate = 
str(ib.issuedate.day) + '-' + str(ib.issuedate.month\n ) + '-' + str(ib.issuedate.year)\n expdate = str(ib.expirydate.day) + '-' + str(ib.expirydate.month\n ) + '-' + str(ib.expirydate.year)\n days = date.today() - ib.issuedate\n print(date.today())\n d = days.days\n fine = 0\n if d > 20:\n day = d - 20\n fine = day * 10\n t = issdate, expdate, fine\n li2.append(t)\n return render(request, 'library/view_issued_book_bystudent.html', {\n 'li1': li1, 'li2': li2})\n\n\ndef aboutus_view(request):\n return render(request, 'library/about_us.html')\n\n\ndef contactus_view(request):\n sub = forms.ContactusForm()\n if request.method == 'POST':\n sub = forms.ContactusForm(request.POST)\n if sub.is_valid():\n email = sub.cleaned_data['Email']\n name = sub.cleaned_data['Name']\n message = sub.cleaned_data['Message']\n send_mail(str(name) + ' || ' + str(email), message,\n EMAIL_HOST_USER, ['[email protected]'], fail_silently\n =False)\n return render(request, 'library/contact_us_success.html')\n return render(request, 'library/contact_us.html', {'form': sub})\n\n\n@login_required(login_url='studentlogin')\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\ndef blog_view(request):\n return render(request, 'library/blogs.html')\n",
"step-4": "from django.shortcuts import render, redirect\nfrom django.http import HttpResponseRedirect\nfrom . import forms, models\nfrom django.contrib.auth.models import Group\nfrom django.contrib import auth\nfrom django.contrib.auth.decorators import login_required, user_passes_test\nfrom datetime import datetime, timedelta, date\nfrom django.core.mail import send_mail\nfrom librarymanagement.settings import EMAIL_HOST_USER\nfrom django.contrib import messages\n\n\ndef home_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request, 'library/index.html')\n\n\ndef studentclick_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request, 'library/student_click.html')\n\n\ndef adminclick_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request, 'library/admin_click.html')\n\n\ndef adminsignup_view(request):\n form = forms.AdminSigupForm()\n if request.method == 'POST':\n form = forms.AdminSigupForm(request.POST)\n if form.is_valid():\n user = form.save()\n user.set_password(user.password)\n user.save()\n my_admin_group = Group.objects.get_or_create(name='ADMIN')\n my_admin_group[0].user_set.add(user)\n return HttpResponseRedirect('adminlogin')\n return render(request, 'library/admin_signup.html', {'form': form})\n\n\ndef studentsignup_view(request):\n form1 = forms.StudentUserForm()\n form2 = forms.StudentExtraForm()\n mydict = {'form1': form1, 'form2': form2}\n if request.method == 'POST':\n form1 = forms.StudentUserForm(request.POST)\n form2 = forms.StudentExtraForm(request.POST)\n if form1.is_valid() and form2.is_valid():\n user = form1.save()\n user.set_password(user.password)\n user.save()\n f2 = form2.save(commit=False)\n f2.user = user\n user2 = f2.save()\n my_student_group = Group.objects.get_or_create(name='STUDENT')\n my_student_group[0].user_set.add(user)\n return 
HttpResponseRedirect('studentlogin')\n return render(request, 'library/student_signup.html', context=mydict)\n\n\ndef is_admin(user):\n return user.groups.filter(name='ADMIN').exists()\n\n\ndef afterlogin_view(request):\n if is_admin(request.user):\n return render(request, 'library/admin_afterlogin.html')\n else:\n return render(request, 'library/student_afterlogin.html')\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef addbook_view(request):\n form = forms.BookForm()\n if request.method == 'POST':\n form = forms.BookForm(request.POST)\n if form.is_valid():\n user = form.save()\n return render(request, 'library/book_added.html')\n return render(request, 'library/add_book.html', {'form': form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef issuebook_view(request):\n form = forms.IssuedBookForm()\n if request.method == 'POST':\n form = forms.IssuedBookForm(request.POST)\n if form.is_valid():\n obj = models.IssuedBook()\n obj.enrollment = request.POST.get('enrollment2')\n obj.isbn = request.POST.get('isbn2')\n obj.save()\n return render(request, 'library/book_issued.html')\n return render(request, 'library/issue_book.html', {'form': form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewissuedbook_view(request):\n issuedbooks = models.IssuedBook.objects.all()\n li = []\n for lb in issuedbooks:\n issdate = str(lb.issuedate.day) + '-' + str(lb.issuedate.month\n ) + '-' + str(lb.issuedate.year)\n expdate = str(lb.expirydate.day) + '-' + str(lb.expirydate.month\n ) + '-' + str(lb.expirydate.year)\n days = date.today() - lb.issuedate\n print(date.today())\n d = days.days\n fine = 0\n if d > 20:\n day = d - 20\n fine = day * 10\n books = 
list(models.Book.objects.filter(isbn=lb.isbn))\n students = list(models.StudentExtra.objects.filter(enrollment=lb.\n enrollment))\n i = 0\n for l in books:\n t = students[i].get_name, students[i].enrollment, books[i\n ].name, books[i].author, issdate, expdate, fine\n i = i + 1\n li.append(t)\n return render(request, 'library/view_issued_book.html', {'li': li})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewstudent_view(request):\n students = models.StudentExtra.objects.all()\n return render(request, 'library/view_student.html', {'students': students})\n\n\n@login_required(login_url='studentlogin')\ndef viewissuedbookbystudent(request):\n student = models.StudentExtra.objects.filter(user_id=request.user.id)\n issuedbook = models.IssuedBook.objects.filter(enrollment=student[0].\n enrollment)\n li1 = []\n li2 = []\n for ib in issuedbook:\n books = models.Book.objects.filter(isbn=ib.isbn)\n for book in books:\n t = request.user, student[0], book.name, book.author\n li1.append(t)\n issdate = str(ib.issuedate.day) + '-' + str(ib.issuedate.month\n ) + '-' + str(ib.issuedate.year)\n expdate = str(ib.expirydate.day) + '-' + str(ib.expirydate.month\n ) + '-' + str(ib.expirydate.year)\n days = date.today() - ib.issuedate\n print(date.today())\n d = days.days\n fine = 0\n if d > 20:\n day = d - 20\n fine = day * 10\n t = issdate, expdate, fine\n li2.append(t)\n return render(request, 'library/view_issued_book_bystudent.html', {\n 'li1': li1, 'li2': li2})\n\n\ndef aboutus_view(request):\n return render(request, 'library/about_us.html')\n\n\ndef contactus_view(request):\n sub = forms.ContactusForm()\n if request.method == 'POST':\n sub = forms.ContactusForm(request.POST)\n if sub.is_valid():\n email = sub.cleaned_data['Email']\n name = sub.cleaned_data['Name']\n message = sub.cleaned_data['Message']\n send_mail(str(name) + ' || ' + str(email), message,\n EMAIL_HOST_USER, ['[email protected]'], fail_silently\n =False)\n return render(request, 
'library/contact_us_success.html')\n return render(request, 'library/contact_us.html', {'form': sub})\n\n\n@login_required(login_url='studentlogin')\ndef viewbook_view(request):\n books = models.Book.objects.all()\n return render(request, 'library/view_book.html', {'books': books})\n\n\ndef blog_view(request):\n return render(request, 'library/blogs.html')\n",
"step-5": "from django.shortcuts import render,redirect\nfrom django.http import HttpResponseRedirect\nfrom . import forms,models\nfrom django.contrib.auth.models import Group\nfrom django.contrib import auth\nfrom django.contrib.auth.decorators import login_required,user_passes_test\nfrom datetime import datetime,timedelta,date\nfrom django.core.mail import send_mail\nfrom librarymanagement.settings import EMAIL_HOST_USER\nfrom django.contrib import messages\n#from django.contib.auth.models import user, auth\n\n\ndef home_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request,'library/index.html')\n\n# for showing blog content to users\n\n\n#for showing signup/login button for student\ndef studentclick_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request,'library/student_click.html')\n\n#for showing signup/login button for teacher\ndef adminclick_view(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect('afterlogin')\n return render(request,'library/admin_click.html')\n\n\n\ndef adminsignup_view(request):\n form=forms.AdminSigupForm()\n if request.method=='POST':\n form=forms.AdminSigupForm(request.POST)\n if form.is_valid():\n user=form.save()\n user.set_password(user.password)\n user.save()\n\n\n my_admin_group = Group.objects.get_or_create(name='ADMIN')\n my_admin_group[0].user_set.add(user)\n\n return HttpResponseRedirect('adminlogin')\n return render(request,'library/admin_signup.html',{'form':form})\n\n\n\n\n\n\ndef studentsignup_view(request):\n form1=forms.StudentUserForm()\n form2=forms.StudentExtraForm()\n mydict={'form1':form1,'form2':form2}\n if request.method=='POST':\n form1=forms.StudentUserForm(request.POST)\n form2=forms.StudentExtraForm(request.POST)\n if form1.is_valid() and form2.is_valid():\n user=form1.save()\n user.set_password(user.password)\n user.save()\n f2=form2.save(commit=False)\n 
f2.user=user\n user2=f2.save()\n\n my_student_group = Group.objects.get_or_create(name='STUDENT')\n my_student_group[0].user_set.add(user)\n\n return HttpResponseRedirect('studentlogin')\n return render(request,'library/student_signup.html',context=mydict)\n\n\n\n\ndef is_admin(user):\n return user.groups.filter(name='ADMIN').exists()\n\ndef afterlogin_view(request):\n if is_admin(request.user):\n return render(request,'library/admin_afterlogin.html')\n else:\n return render(request,'library/student_afterlogin.html')\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef addbook_view(request):\n #now it is empty book form for sending to html\n form=forms.BookForm()\n if request.method=='POST':\n #now this form have data from html\n form=forms.BookForm(request.POST)\n if form.is_valid():\n user=form.save()\n return render(request,'library/book_added.html')\n return render(request,'library/add_book.html',{'form':form})\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewbook_view(request):\n books=models.Book.objects.all()\n return render(request,'library/view_book.html',{'books':books})\n\n\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef issuebook_view(request):\n form=forms.IssuedBookForm()\n if request.method=='POST':\n #now this form have data from html\n form=forms.IssuedBookForm(request.POST)\n if form.is_valid():\n obj=models.IssuedBook()\n obj.enrollment=request.POST.get('enrollment2')\n obj.isbn=request.POST.get('isbn2')\n obj.save()\n return render(request,'library/book_issued.html')\n return render(request,'library/issue_book.html',{'form':form})\n\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewissuedbook_view(request):\n issuedbooks=models.IssuedBook.objects.all()\n li=[]\n for lb in issuedbooks:\n issdate=str(lb.issuedate.day)+'-'+str(lb.issuedate.month)+'-'+str(lb.issuedate.year)\n 
expdate=str(lb.expirydate.day)+'-'+str(lb.expirydate.month)+'-'+str(lb.expirydate.year)\n #fine calculation\n days=(date.today()-lb.issuedate)\n print(date.today())\n d=days.days\n fine=0\n if d>20:\n day=d-20\n fine=day*10\n\n\n books=list(models.Book.objects.filter(isbn=lb.isbn))\n students=list(models.StudentExtra.objects.filter(enrollment=lb.enrollment))\n i=0\n for l in books:\n t=(students[i].get_name,students[i].enrollment,books[i].name,books[i].author,issdate,expdate,fine)\n i=i+1\n li.append(t)\n\n return render(request,'library/view_issued_book.html',{'li':li})\n\n@login_required(login_url='adminlogin')\n@user_passes_test(is_admin)\ndef viewstudent_view(request):\n students=models.StudentExtra.objects.all()\n return render(request,'library/view_student.html',{'students':students})\n\n\n@login_required(login_url='studentlogin')\ndef viewissuedbookbystudent(request):\n student=models.StudentExtra.objects.filter(user_id=request.user.id)\n issuedbook=models.IssuedBook.objects.filter(enrollment=student[0].enrollment)\n\n li1=[]\n\n li2=[]\n for ib in issuedbook:\n books=models.Book.objects.filter(isbn=ib.isbn)\n for book in books:\n t=(request.user,student[0],book.name,book.author)\n li1.append(t)\n issdate=str(ib.issuedate.day)+'-'+str(ib.issuedate.month)+'-'+str(ib.issuedate.year)\n expdate=str(ib.expirydate.day)+'-'+str(ib.expirydate.month)+'-'+str(ib.expirydate.year)\n \n #fine calculation\n days=(date.today()-ib.issuedate)\n print(date.today())\n d=days.days\n fine=0\n if d>20:\n day=d-20\n fine=day*10\n t=(issdate,expdate,fine)\n li2.append(t)\n\n return render(request,'library/view_issued_book_bystudent.html',{'li1':li1,'li2':li2})\n\ndef aboutus_view(request):\n return render(request,'library/about_us.html')\n\ndef contactus_view(request):\n sub = forms.ContactusForm()\n if request.method == 'POST':\n sub = forms.ContactusForm(request.POST)\n if sub.is_valid():\n email = sub.cleaned_data['Email']\n name=sub.cleaned_data['Name']\n message = 
sub.cleaned_data['Message']\n send_mail(str(name)+' || '+str(email),message, EMAIL_HOST_USER, ['[email protected]'], fail_silently = False)\n return render(request, 'library/contact_us_success.html')\n return render(request, 'library/contact_us.html', {'form':sub})\n\n\n@login_required(login_url='studentlogin')\ndef viewbook_view(request):\n books=models.Book.objects.all()\n return render(request,'library/view_book.html',{'books':books})\n\n\ndef blog_view(request):\n return render(request,'library/blogs.html')",
"step-ids": [
8,
11,
15,
18,
19
]
}
|
[
8,
11,
15,
18,
19
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ItemInfo(Command):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ItemInfo(Command):
@is_command
def item_info(self, player, *args):
if len(args) == 0:
raise CommandException(CommandException.NOT_ENOUGH_ARGUMENTS)
item_id = args[0]
if item_id in player.inventory:
item = player.inventory[item_id]
elif item_id in player.location.lobjects:
item = player.location.lobjects[item_id]
else:
raise CommandException(CommandException.UNKNOWN_ITEM)
return Event('item-info', {'item': item.to_dict()})
<|reserved_special_token_1|>
from command import Command, is_command, CommandException
from event import Event
class ItemInfo(Command):
@is_command
def item_info(self, player, *args):
if len(args) == 0:
raise CommandException(CommandException.NOT_ENOUGH_ARGUMENTS)
item_id = args[0]
if item_id in player.inventory:
item = player.inventory[item_id]
elif item_id in player.location.lobjects:
item = player.location.lobjects[item_id]
else:
raise CommandException(CommandException.UNKNOWN_ITEM)
return Event('item-info', {'item': item.to_dict()})
<|reserved_special_token_1|>
from command import Command, is_command, CommandException
from event import Event
class ItemInfo(Command):
    """Command that reports details about an item given its identifier."""

    @is_command
    def item_info(self, player, *args):
        """Resolve args[0] against the player's inventory, then the room's
        objects, and emit an item-info event; raise a CommandException for
        a missing argument or an unknown item id."""
        if not args:
            raise CommandException(CommandException.NOT_ENOUGH_ARGUMENTS)
        item_id = args[0]
        # Inventory takes precedence over objects in the current location.
        for container in (player.inventory, player.location.lobjects):
            if item_id in container:
                return Event('item-info', {"item": container[item_id].to_dict()})
        raise CommandException(CommandException.UNKNOWN_ITEM)
|
flexible
|
{
"blob_id": "6b2bd6954f188626fa857ffc37611d3f971d22e2",
"index": 5259,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ItemInfo(Command):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ItemInfo(Command):\n\n @is_command\n def item_info(self, player, *args):\n if len(args) == 0:\n raise CommandException(CommandException.NOT_ENOUGH_ARGUMENTS)\n item_id = args[0]\n if item_id in player.inventory:\n item = player.inventory[item_id]\n elif item_id in player.location.lobjects:\n item = player.location.lobjects[item_id]\n else:\n raise CommandException(CommandException.UNKNOWN_ITEM)\n return Event('item-info', {'item': item.to_dict()})\n",
"step-4": "from command import Command, is_command, CommandException\nfrom event import Event\n\n\nclass ItemInfo(Command):\n\n @is_command\n def item_info(self, player, *args):\n if len(args) == 0:\n raise CommandException(CommandException.NOT_ENOUGH_ARGUMENTS)\n item_id = args[0]\n if item_id in player.inventory:\n item = player.inventory[item_id]\n elif item_id in player.location.lobjects:\n item = player.location.lobjects[item_id]\n else:\n raise CommandException(CommandException.UNKNOWN_ITEM)\n return Event('item-info', {'item': item.to_dict()})\n",
"step-5": "from command import Command, is_command, CommandException\nfrom event import Event\n\n\nclass ItemInfo(Command):\n\n @is_command\n def item_info(self, player, *args):\n if len(args) == 0:\n raise CommandException(CommandException.NOT_ENOUGH_ARGUMENTS)\n item_id = args[0]\n if item_id in player.inventory:\n item = player.inventory[item_id]\n elif item_id in player.location.lobjects:\n item = player.location.lobjects[item_id]\n else:\n raise CommandException(CommandException.UNKNOWN_ITEM)\n\n return Event('item-info', {\"item\": item.to_dict()})",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def check_database(engine: Engine, user_name: pwd.struct_passwd, tables:
Iterable[Table]):
logger.info('Checking database access as user %s', user_name)
try:
conn = engine.connect()
except DBAPIError as e:
logger.critical('Could not connect to database as %s: %s', user_name, e
)
raise
with contextlib.closing(conn):
for table in tables:
try:
check_table(conn, table)
except DBAPIError as e:
logger.critical(
'Query check for table %s as user %s failed: %s', table
.name, user_name, e)
raise
def check_table(conn, table):
conn.execute(select([exists(select([null()]).select_from(table))])).scalar(
)
def main():
parser = ArgumentParser(parents=[common_parser])
args = parser.parse_args()
setup_cli_logging(parser.prog, args)
config = load_config(args.config, runtime_checks=True)
try:
engine = db.create_engine(config, poolclass=NullPool)
agent_pwd = pwd.getpwnam(constants.AGENT_USER)
with dropped_privileges(agent_pwd):
check_database(engine, agent_pwd.pw_name, (db.radacct, db.
radpostauth))
portal_pwd = pwd.getpwnam(constants.PORTAL_USER)
with dropped_privileges(portal_pwd):
check_database(engine, portal_pwd.pw_name, (db.radacct, db.
radpostauth, db.radusergroup))
radius_pwd = pwd.getpwnam(constants.RADIUS_USER)
with dropped_privileges(radius_pwd):
check_database(engine, radius_pwd.pw_name, (db.radacct, db.
radgroupcheck, db.radgroupreply, db.radpostauth, db.
radreply, db.radusergroup))
except DBAPIError:
return os.EX_TEMPFAIL
return os.EX_OK
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def check_database(engine: Engine, user_name: pwd.struct_passwd, tables:
Iterable[Table]):
logger.info('Checking database access as user %s', user_name)
try:
conn = engine.connect()
except DBAPIError as e:
logger.critical('Could not connect to database as %s: %s', user_name, e
)
raise
with contextlib.closing(conn):
for table in tables:
try:
check_table(conn, table)
except DBAPIError as e:
logger.critical(
'Query check for table %s as user %s failed: %s', table
.name, user_name, e)
raise
def check_table(conn, table):
conn.execute(select([exists(select([null()]).select_from(table))])).scalar(
)
def main():
parser = ArgumentParser(parents=[common_parser])
args = parser.parse_args()
setup_cli_logging(parser.prog, args)
config = load_config(args.config, runtime_checks=True)
try:
engine = db.create_engine(config, poolclass=NullPool)
agent_pwd = pwd.getpwnam(constants.AGENT_USER)
with dropped_privileges(agent_pwd):
check_database(engine, agent_pwd.pw_name, (db.radacct, db.
radpostauth))
portal_pwd = pwd.getpwnam(constants.PORTAL_USER)
with dropped_privileges(portal_pwd):
check_database(engine, portal_pwd.pw_name, (db.radacct, db.
radpostauth, db.radusergroup))
radius_pwd = pwd.getpwnam(constants.RADIUS_USER)
with dropped_privileges(radius_pwd):
check_database(engine, radius_pwd.pw_name, (db.radacct, db.
radgroupcheck, db.radgroupreply, db.radpostauth, db.
radreply, db.radusergroup))
except DBAPIError:
return os.EX_TEMPFAIL
return os.EX_OK
if __name__ == '__main__':
sys.exit(main())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = logging.getLogger(__package__)
def check_database(engine: Engine, user_name: pwd.struct_passwd, tables:
Iterable[Table]):
logger.info('Checking database access as user %s', user_name)
try:
conn = engine.connect()
except DBAPIError as e:
logger.critical('Could not connect to database as %s: %s', user_name, e
)
raise
with contextlib.closing(conn):
for table in tables:
try:
check_table(conn, table)
except DBAPIError as e:
logger.critical(
'Query check for table %s as user %s failed: %s', table
.name, user_name, e)
raise
def check_table(conn, table):
conn.execute(select([exists(select([null()]).select_from(table))])).scalar(
)
def main():
parser = ArgumentParser(parents=[common_parser])
args = parser.parse_args()
setup_cli_logging(parser.prog, args)
config = load_config(args.config, runtime_checks=True)
try:
engine = db.create_engine(config, poolclass=NullPool)
agent_pwd = pwd.getpwnam(constants.AGENT_USER)
with dropped_privileges(agent_pwd):
check_database(engine, agent_pwd.pw_name, (db.radacct, db.
radpostauth))
portal_pwd = pwd.getpwnam(constants.PORTAL_USER)
with dropped_privileges(portal_pwd):
check_database(engine, portal_pwd.pw_name, (db.radacct, db.
radpostauth, db.radusergroup))
radius_pwd = pwd.getpwnam(constants.RADIUS_USER)
with dropped_privileges(radius_pwd):
check_database(engine, radius_pwd.pw_name, (db.radacct, db.
radgroupcheck, db.radgroupreply, db.radpostauth, db.
radreply, db.radusergroup))
except DBAPIError:
return os.EX_TEMPFAIL
return os.EX_OK
if __name__ == '__main__':
sys.exit(main())
<|reserved_special_token_1|>
import contextlib
import logging
import os
import pwd
import sys
from typing import Iterable
from sqlalchemy import Table, exists, null, select
from sqlalchemy.engine import Engine
from sqlalchemy.exc import DBAPIError
from sqlalchemy.pool import NullPool
from hades import constants
from hades.common import db
from hades.common.cli import ArgumentParser, parser as common_parser, setup_cli_logging
from hades.common.privileges import dropped_privileges
from hades.config.loader import load_config
logger = logging.getLogger(__package__)
def check_database(engine: Engine, user_name: str, tables:
    Iterable[Table]):
    """Check that the database is reachable and queryable as *user_name*.

    Connects via *engine* and runs a probe query against every table in
    *tables*.  Any DBAPIError is logged at CRITICAL level and re-raised
    so the caller can translate it into an exit status.
    """
    logger.info('Checking database access as user %s', user_name)
    try:
        conn = engine.connect()
    except DBAPIError as e:
        logger.critical('Could not connect to database as %s: %s', user_name, e
            )
        raise
    # closing() releases the connection even if a probe query fails.
    with contextlib.closing(conn):
        for table in tables:
            try:
                check_table(conn, table)
            except DBAPIError as e:
                logger.critical(
                    'Query check for table %s as user %s failed: %s', table
                    .name, user_name, e)
                raise
def check_table(conn, table):
    """Run a minimal SELECT EXISTS probe against *table* on *conn*."""
    conn.execute(select([exists(select([null()]).select_from(table))])).scalar(
        )
def main():
    """Check database access for the agent, portal and RADIUS system users.

    Drops privileges to each user in turn and probes the tables that user
    needs.  Returns os.EX_OK on success, os.EX_TEMPFAIL on any DBAPIError.
    """
    parser = ArgumentParser(parents=[common_parser])
    args = parser.parse_args()
    setup_cli_logging(parser.prog, args)
    config = load_config(args.config, runtime_checks=True)
    try:
        # NullPool: a fresh connection per check — presumably so each check
        # connects under the dropped privileges; TODO confirm.
        engine = db.create_engine(config, poolclass=NullPool)
        agent_pwd = pwd.getpwnam(constants.AGENT_USER)
        with dropped_privileges(agent_pwd):
            check_database(engine, agent_pwd.pw_name, (db.radacct, db.
                radpostauth))
        portal_pwd = pwd.getpwnam(constants.PORTAL_USER)
        with dropped_privileges(portal_pwd):
            check_database(engine, portal_pwd.pw_name, (db.radacct, db.
                radpostauth, db.radusergroup))
        radius_pwd = pwd.getpwnam(constants.RADIUS_USER)
        with dropped_privileges(radius_pwd):
            check_database(engine, radius_pwd.pw_name, (db.radacct, db.
                radgroupcheck, db.radgroupreply, db.radpostauth, db.
                radreply, db.radusergroup))
    except DBAPIError:
        return os.EX_TEMPFAIL
    return os.EX_OK
if __name__ == '__main__':
sys.exit(main())
<|reserved_special_token_1|>
import contextlib
import logging
import os
import pwd
import sys
from typing import Iterable
from sqlalchemy import Table, exists, null, select
from sqlalchemy.engine import Engine
from sqlalchemy.exc import DBAPIError
from sqlalchemy.pool import NullPool
from hades import constants
from hades.common import db
from hades.common.cli import (
ArgumentParser, parser as common_parser, setup_cli_logging,
)
from hades.common.privileges import dropped_privileges
from hades.config.loader import load_config
logger = logging.getLogger(__package__)
def check_database(engine: Engine, user_name: str,
                   tables: Iterable[Table]):
    """Check that *user_name*'s connection can query every table in *tables*.

    :param engine: SQLAlchemy engine bound to the credentials being tested.
    :param user_name: login name being checked (the ``pw_name`` of a passwd
        entry) — used only in log messages.
    :param tables: tables to probe with a trivial SELECT.
    :raises DBAPIError: re-raised after logging if connecting or any probe
        query fails.
    """
    # Fix: the parameter was annotated pwd.struct_passwd, but every caller
    # passes <entry>.pw_name, which is a plain str.
    logger.info("Checking database access as user %s", user_name)
    try:
        conn = engine.connect()
    except DBAPIError as e:
        logger.critical("Could not connect to database as %s: %s",
                        user_name, e)
        raise
    # closing() guarantees the connection is released even when a probe fails.
    with contextlib.closing(conn):
        for table in tables:
            try:
                check_table(conn, table)
            except DBAPIError as e:
                logger.critical("Query check for table %s as user %s failed: "
                                "%s", table.name, user_name, e)
                raise
def check_table(conn, table):
    """Probe *table* with a minimal SELECT EXISTS query to confirm access."""
    probe = select([null()]).select_from(table)
    conn.execute(select([exists(probe)])).scalar()
def main():
    """Verify database access for each Hades system user.

    For every (system user, tables) pair, drop privileges to that user and
    run probe queries against the tables that user needs.

    :return: ``os.EX_OK`` on success, ``os.EX_TEMPFAIL`` if any database
        operation raises DBAPIError.
    """
    parser = ArgumentParser(parents=[common_parser])
    args = parser.parse_args()
    setup_cli_logging(parser.prog, args)
    config = load_config(args.config, runtime_checks=True)
    # Each user only needs access to the subset of tables it touches.
    checks = (
        (constants.AGENT_USER,
         (db.radacct, db.radpostauth)),
        (constants.PORTAL_USER,
         (db.radacct, db.radpostauth, db.radusergroup)),
        (constants.RADIUS_USER,
         (db.radacct, db.radgroupcheck, db.radgroupreply,
          db.radpostauth, db.radreply, db.radusergroup)),
    )
    try:
        # NullPool: a fresh connection per check — presumably so connections
        # are made under the dropped privileges; TODO confirm.
        engine = db.create_engine(config, poolclass=NullPool)
        for user, tables in checks:
            user_pwd = pwd.getpwnam(user)
            with dropped_privileges(user_pwd):
                check_database(engine, user_pwd.pw_name, tables)
    except DBAPIError:
        return os.EX_TEMPFAIL
    return os.EX_OK
# Entry point: propagate main()'s exit status to the shell.
if __name__ == '__main__':
    sys.exit(main())
|
flexible
|
{
"blob_id": "c9df53ac06b8bb106d73825d60fa885c06385e95",
"index": 8557,
"step-1": "<mask token>\n\n\ndef check_database(engine: Engine, user_name: pwd.struct_passwd, tables:\n Iterable[Table]):\n logger.info('Checking database access as user %s', user_name)\n try:\n conn = engine.connect()\n except DBAPIError as e:\n logger.critical('Could not connect to database as %s: %s', user_name, e\n )\n raise\n with contextlib.closing(conn):\n for table in tables:\n try:\n check_table(conn, table)\n except DBAPIError as e:\n logger.critical(\n 'Query check for table %s as user %s failed: %s', table\n .name, user_name, e)\n raise\n\n\ndef check_table(conn, table):\n conn.execute(select([exists(select([null()]).select_from(table))])).scalar(\n )\n\n\ndef main():\n parser = ArgumentParser(parents=[common_parser])\n args = parser.parse_args()\n setup_cli_logging(parser.prog, args)\n config = load_config(args.config, runtime_checks=True)\n try:\n engine = db.create_engine(config, poolclass=NullPool)\n agent_pwd = pwd.getpwnam(constants.AGENT_USER)\n with dropped_privileges(agent_pwd):\n check_database(engine, agent_pwd.pw_name, (db.radacct, db.\n radpostauth))\n portal_pwd = pwd.getpwnam(constants.PORTAL_USER)\n with dropped_privileges(portal_pwd):\n check_database(engine, portal_pwd.pw_name, (db.radacct, db.\n radpostauth, db.radusergroup))\n radius_pwd = pwd.getpwnam(constants.RADIUS_USER)\n with dropped_privileges(radius_pwd):\n check_database(engine, radius_pwd.pw_name, (db.radacct, db.\n radgroupcheck, db.radgroupreply, db.radpostauth, db.\n radreply, db.radusergroup))\n except DBAPIError:\n return os.EX_TEMPFAIL\n return os.EX_OK\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef check_database(engine: Engine, user_name: pwd.struct_passwd, tables:\n Iterable[Table]):\n logger.info('Checking database access as user %s', user_name)\n try:\n conn = engine.connect()\n except DBAPIError as e:\n logger.critical('Could not connect to database as %s: %s', user_name, e\n )\n raise\n with contextlib.closing(conn):\n for table in tables:\n try:\n check_table(conn, table)\n except DBAPIError as e:\n logger.critical(\n 'Query check for table %s as user %s failed: %s', table\n .name, user_name, e)\n raise\n\n\ndef check_table(conn, table):\n conn.execute(select([exists(select([null()]).select_from(table))])).scalar(\n )\n\n\ndef main():\n parser = ArgumentParser(parents=[common_parser])\n args = parser.parse_args()\n setup_cli_logging(parser.prog, args)\n config = load_config(args.config, runtime_checks=True)\n try:\n engine = db.create_engine(config, poolclass=NullPool)\n agent_pwd = pwd.getpwnam(constants.AGENT_USER)\n with dropped_privileges(agent_pwd):\n check_database(engine, agent_pwd.pw_name, (db.radacct, db.\n radpostauth))\n portal_pwd = pwd.getpwnam(constants.PORTAL_USER)\n with dropped_privileges(portal_pwd):\n check_database(engine, portal_pwd.pw_name, (db.radacct, db.\n radpostauth, db.radusergroup))\n radius_pwd = pwd.getpwnam(constants.RADIUS_USER)\n with dropped_privileges(radius_pwd):\n check_database(engine, radius_pwd.pw_name, (db.radacct, db.\n radgroupcheck, db.radgroupreply, db.radpostauth, db.\n radreply, db.radusergroup))\n except DBAPIError:\n return os.EX_TEMPFAIL\n return os.EX_OK\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-3": "<mask token>\nlogger = logging.getLogger(__package__)\n\n\ndef check_database(engine: Engine, user_name: pwd.struct_passwd, tables:\n Iterable[Table]):\n logger.info('Checking database access as user %s', user_name)\n try:\n conn = engine.connect()\n except DBAPIError as e:\n logger.critical('Could not connect to database as %s: %s', user_name, e\n )\n raise\n with contextlib.closing(conn):\n for table in tables:\n try:\n check_table(conn, table)\n except DBAPIError as e:\n logger.critical(\n 'Query check for table %s as user %s failed: %s', table\n .name, user_name, e)\n raise\n\n\ndef check_table(conn, table):\n conn.execute(select([exists(select([null()]).select_from(table))])).scalar(\n )\n\n\ndef main():\n parser = ArgumentParser(parents=[common_parser])\n args = parser.parse_args()\n setup_cli_logging(parser.prog, args)\n config = load_config(args.config, runtime_checks=True)\n try:\n engine = db.create_engine(config, poolclass=NullPool)\n agent_pwd = pwd.getpwnam(constants.AGENT_USER)\n with dropped_privileges(agent_pwd):\n check_database(engine, agent_pwd.pw_name, (db.radacct, db.\n radpostauth))\n portal_pwd = pwd.getpwnam(constants.PORTAL_USER)\n with dropped_privileges(portal_pwd):\n check_database(engine, portal_pwd.pw_name, (db.radacct, db.\n radpostauth, db.radusergroup))\n radius_pwd = pwd.getpwnam(constants.RADIUS_USER)\n with dropped_privileges(radius_pwd):\n check_database(engine, radius_pwd.pw_name, (db.radacct, db.\n radgroupcheck, db.radgroupreply, db.radpostauth, db.\n radreply, db.radusergroup))\n except DBAPIError:\n return os.EX_TEMPFAIL\n return os.EX_OK\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-4": "import contextlib\nimport logging\nimport os\nimport pwd\nimport sys\nfrom typing import Iterable\nfrom sqlalchemy import Table, exists, null, select\nfrom sqlalchemy.engine import Engine\nfrom sqlalchemy.exc import DBAPIError\nfrom sqlalchemy.pool import NullPool\nfrom hades import constants\nfrom hades.common import db\nfrom hades.common.cli import ArgumentParser, parser as common_parser, setup_cli_logging\nfrom hades.common.privileges import dropped_privileges\nfrom hades.config.loader import load_config\nlogger = logging.getLogger(__package__)\n\n\ndef check_database(engine: Engine, user_name: pwd.struct_passwd, tables:\n Iterable[Table]):\n logger.info('Checking database access as user %s', user_name)\n try:\n conn = engine.connect()\n except DBAPIError as e:\n logger.critical('Could not connect to database as %s: %s', user_name, e\n )\n raise\n with contextlib.closing(conn):\n for table in tables:\n try:\n check_table(conn, table)\n except DBAPIError as e:\n logger.critical(\n 'Query check for table %s as user %s failed: %s', table\n .name, user_name, e)\n raise\n\n\ndef check_table(conn, table):\n conn.execute(select([exists(select([null()]).select_from(table))])).scalar(\n )\n\n\ndef main():\n parser = ArgumentParser(parents=[common_parser])\n args = parser.parse_args()\n setup_cli_logging(parser.prog, args)\n config = load_config(args.config, runtime_checks=True)\n try:\n engine = db.create_engine(config, poolclass=NullPool)\n agent_pwd = pwd.getpwnam(constants.AGENT_USER)\n with dropped_privileges(agent_pwd):\n check_database(engine, agent_pwd.pw_name, (db.radacct, db.\n radpostauth))\n portal_pwd = pwd.getpwnam(constants.PORTAL_USER)\n with dropped_privileges(portal_pwd):\n check_database(engine, portal_pwd.pw_name, (db.radacct, db.\n radpostauth, db.radusergroup))\n radius_pwd = pwd.getpwnam(constants.RADIUS_USER)\n with dropped_privileges(radius_pwd):\n check_database(engine, radius_pwd.pw_name, (db.radacct, db.\n radgroupcheck, 
db.radgroupreply, db.radpostauth, db.\n radreply, db.radusergroup))\n except DBAPIError:\n return os.EX_TEMPFAIL\n return os.EX_OK\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-5": "import contextlib\nimport logging\nimport os\nimport pwd\nimport sys\nfrom typing import Iterable\n\nfrom sqlalchemy import Table, exists, null, select\nfrom sqlalchemy.engine import Engine\nfrom sqlalchemy.exc import DBAPIError\nfrom sqlalchemy.pool import NullPool\n\nfrom hades import constants\nfrom hades.common import db\nfrom hades.common.cli import (\n ArgumentParser, parser as common_parser, setup_cli_logging,\n)\nfrom hades.common.privileges import dropped_privileges\nfrom hades.config.loader import load_config\n\nlogger = logging.getLogger(__package__)\n\n\ndef check_database(engine: Engine, user_name: pwd.struct_passwd,\n tables: Iterable[Table]):\n logger.info(\"Checking database access as user %s\", user_name)\n try:\n conn = engine.connect()\n except DBAPIError as e:\n logger.critical(\"Could not connect to database as %s: %s\",\n user_name, e)\n raise\n with contextlib.closing(conn):\n for table in tables:\n try:\n check_table(conn, table)\n except DBAPIError as e:\n logger.critical(\"Query check for table %s as user %s failed: \"\n \"%s\", table.name, user_name, e)\n raise\n\n\ndef check_table(conn, table):\n conn.execute(select([exists(select([null()]).select_from(table))])).scalar()\n\n\ndef main():\n parser = ArgumentParser(parents=[common_parser])\n args = parser.parse_args()\n setup_cli_logging(parser.prog, args)\n config = load_config(args.config, runtime_checks=True)\n try:\n engine = db.create_engine(config, poolclass=NullPool)\n agent_pwd = pwd.getpwnam(constants.AGENT_USER)\n with dropped_privileges(agent_pwd):\n check_database(engine, agent_pwd.pw_name,\n (db.radacct, db.radpostauth))\n portal_pwd = pwd.getpwnam(constants.PORTAL_USER)\n with dropped_privileges(portal_pwd):\n check_database(engine, portal_pwd.pw_name,\n (db.radacct, db.radpostauth, db.radusergroup))\n radius_pwd = pwd.getpwnam(constants.RADIUS_USER)\n with dropped_privileges(radius_pwd):\n check_database(engine, radius_pwd.pw_name,\n (db.radacct, 
db.radgroupcheck, db.radgroupreply,\n db.radpostauth, db.radreply, db.radusergroup))\n except DBAPIError:\n return os.EX_TEMPFAIL\n return os.EX_OK\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from django.apps import AppConfig
class AccountsnConfig(AppConfig):
    """Django application configuration for the ``accounts`` app.

    NOTE(review): the class name looks like a typo for ``AccountsConfig``;
    renaming it would require updating any INSTALLED_APPS /
    default_app_config references, so it is left as-is.
    """
    # Dotted path of the application this config applies to.
    name = 'accounts'
|
normal
|
{
"blob_id": "a3fc624d6d101667ab11842eac96ed1b34d4317e",
"index": 3369,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass AccountsnConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AccountsnConfig(AppConfig):\n name = 'accounts'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass AccountsnConfig(AppConfig):\n name = 'accounts'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.contrib import admin
from django.urls import path, include, re_path
from django.conf.urls import include
# from rest_framework import routers
from rest_framework.authtoken import views
# from adventure.api import PlayerViewSet, RoomViewSet
# from adventure.api import move
# router = routers.DefaultRouter()
# router.register('rooms', RoomViewSet)
# router.register('currentRoom', PlayerViewSet)
# URL routing table: the Django admin plus the project's two API apps.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/', include('api.urls')),
    path('api/adv/', include('adventure.urls')),
    # path('api-token-auth', views.obtain_auth_token)
]
|
normal
|
{
"blob_id": "a14114f9bb677601e6d75a72b84ec128fc9bbe61",
"index": 71,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('admin/', admin.site.urls), path('api/', include(\n 'api.urls')), path('api/adv/', include('adventure.urls'))]\n",
"step-3": "from django.contrib import admin\nfrom django.urls import path, include, re_path\nfrom django.conf.urls import include\nfrom rest_framework.authtoken import views\nurlpatterns = [path('admin/', admin.site.urls), path('api/', include(\n 'api.urls')), path('api/adv/', include('adventure.urls'))]\n",
"step-4": "from django.contrib import admin\nfrom django.urls import path, include, re_path\nfrom django.conf.urls import include\n# from rest_framework import routers\nfrom rest_framework.authtoken import views\n# from adventure.api import PlayerViewSet, RoomViewSet\n\n\n\n# from adventure.api import move\n\n# router = routers.DefaultRouter()\n# router.register('rooms', RoomViewSet)\n# router.register('currentRoom', PlayerViewSet)\n\n\n\n\nurlpatterns = [\n path('admin/', admin.site.urls),\n path('api/', include('api.urls')),\n path('api/adv/', include('adventure.urls')),\n # path('api-token-auth', views.obtain_auth_token)\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import tkinter as tk
import random
from tkinter import messagebox as mb
n = 16
class Application(tk.Frame):
    """Tkinter GUI for the classic 4x4 "15 puzzle".

    Grid row 0 holds the control buttons (Exit/New); rows 1-4 hold the
    sixteen puzzle cells.  ``numbers`` mirrors the board state row-major,
    with 0 marking the blank cell.
    """

    # Button widgets for the 16 cells; 0 stands in for the blank cell.
    playButtons = [0] * n

    def __init__(self, master=None):
        tk.Frame.__init__(self, master)
        self.grid(sticky='NEWS')
        self.createWidgets()

    def show_win(self):
        """Announce the win and deal a fresh board."""
        msg = "YOU WIN!"
        mb.showinfo("Information", msg)
        self.makePlayButtons()

    @staticmethod
    def _is_solvable(numbers):
        """Return True if the 4x4 position *numbers* can reach the goal.

        Standard parity rule for even board widths: a position is solvable
        iff (inversion count + blank row counted from the bottom, 1-based)
        is odd.
        """
        tiles = [x for x in numbers if x != 0]
        inversions = sum(1
                         for i in range(len(tiles))
                         for j in range(i + 1, len(tiles))
                         if tiles[i] > tiles[j])
        blank_row_from_bottom = 4 - numbers.index(0) // 4
        return (inversions + blank_row_from_bottom) % 2 == 1

    def move(self, num):
        """Return a button callback that slides tile *num* into the blank."""
        def slide(self=self, num=num):
            index = self.numbers.index(num)
            r, c = divmod(index, 4)
            # At most one orthogonal neighbour can be the blank cell.
            for dr, dc in ((-1, 0), (1, 0), (0, 1), (0, -1)):
                nr, nc = r + dr, c + dc
                if 0 <= nr <= 3 and 0 <= nc <= 3:
                    target = 4 * nr + nc
                    if self.numbers[target] == 0:
                        self.numbers[target], self.numbers[index] = (
                            self.numbers[index], self.numbers[target])
                        # Row 0 hosts the control buttons, hence the +1.
                        self.playButtons[index].grid(row=nr + 1, column=nc)
                        self.playButtons[target], self.playButtons[index] = (
                            self.playButtons[index], self.playButtons[target])
                        break
            # Goal state is 1..15 followed by the blank.
            if self.numbers == [i % 16 for i in range(1, 17)]:
                self.show_win()
        return slide

    def makePlayButtons(self):
        """Shuffle a new *solvable* board and (re)create its buttons."""
        for but in self.playButtons:
            if but != 0:
                but.destroy()
        self.numbers = list(range(n))
        random.shuffle(self.numbers)
        # Fix: half of all random permutations are unsolvable; reshuffle
        # until the player can actually win.
        while not self._is_solvable(self.numbers):
            random.shuffle(self.numbers)
        self.playButtons = [0] * n
        for i in range(n):
            if self.numbers[i] == 0:
                continue
            self.playButtons[i] = tk.Button(
                self, text=str(self.numbers[i]),
                command=self.move(self.numbers[i]))
            self.playButtons[i].grid(row=i // 4 + 1, column=i % 4,
                                     sticky='SENW')

    def createWidgets(self):
        """Build the control buttons, deal the first board, set grid weights."""
        self.quitButton = tk.Button(self, text='Exit', command=self.quit)
        self.newButton = tk.Button(self, text='New',
                                   command=self.makePlayButtons)
        self.makePlayButtons()
        self.quitButton.grid(row=0, column=0)
        self.newButton.grid(row=0, column=2)
        self.master.columnconfigure(0, weight=1)
        self.master.rowconfigure(0, weight=1)
        for r in range(1, 5):
            self.rowconfigure(r, weight=1)
        for c in range(4):
            self.columnconfigure(c, weight=1)
def _run():
    """Create the puzzle window and enter the Tk event loop."""
    app = Application()
    app.master.title('15 puzzle')
    app.mainloop()


# Guard the GUI launch so importing this module has no side effects.
if __name__ == '__main__':
    _run()
|
normal
|
{
"blob_id": "f29bc0263f8bb1d59ab2442347727d9d3233ec77",
"index": 9893,
"step-1": "<mask token>\n\n\nclass Application(tk.Frame):\n <mask token>\n <mask token>\n\n def show_win(self):\n msg = 'YOU WIN!'\n mb.showinfo('Information', msg)\n self.makePlayButtons()\n\n def move(self, num):\n\n def move2(self=self, num=num):\n index = self.numbers.index(num)\n r = index // 4\n c = index % 4\n if r - 1 >= 0 and self.numbers[4 * (r - 1) + c] == 0:\n self.numbers[4 * (r - 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r - 1) + c]\n self.playButtons[index].grid(row=r - 1 + 1, column=c)\n self.playButtons[4 * (r - 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r - \n 1) + c]\n elif r + 1 <= 3 and self.numbers[4 * (r + 1) + c] == 0:\n self.numbers[4 * (r + 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r + 1) + c]\n self.playButtons[index].grid(row=r + 1 + 1, column=c)\n self.playButtons[4 * (r + 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r + \n 1) + c]\n elif c + 1 <= 3 and self.numbers[4 * r + c + 1] == 0:\n self.numbers[4 * r + c + 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c + 1]\n self.playButtons[index].grid(row=r + 1, column=c + 1)\n self.playButtons[4 * r + c + 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c + 1\n ]\n elif c - 1 >= 0 and self.numbers[4 * r + c - 1] == 0:\n self.numbers[4 * r + c - 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c - 1]\n self.playButtons[index].grid(row=r + 1, column=c - 1)\n self.playButtons[4 * r + c - 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c - 1\n ]\n if self.numbers == [(i % 16) for i in range(1, 17)]:\n self.show_win()\n return move2\n\n def makePlayButtons(self):\n for but in self.playButtons:\n if but != 0:\n but.destroy()\n self.numbers = [i for i in range(n)]\n random.shuffle(self.numbers)\n self.playButtons = [0] * n\n for i in 
range(n):\n if self.numbers[i] == 0:\n continue\n self.playButtons[i] = tk.Button(self, text=str(self.numbers[i]),\n command=self.move(self.numbers[i]))\n self.playButtons[i].grid(row=i // 4 + 1, column=i % 4, sticky=\n 'SENW')\n\n def createWidgets(self):\n self.quitButton = tk.Button(self, text='Exit', command=self.quit)\n self.newButton = tk.Button(self, text='New', command=self.\n makePlayButtons)\n self.makePlayButtons()\n self.quitButton.grid(row=0, column=0)\n self.newButton.grid(row=0, column=2)\n self.master.columnconfigure(0, weight=1)\n self.master.rowconfigure(0, weight=1)\n for r in range(1, 5):\n self.rowconfigure(r, weight=1)\n for c in range(4):\n self.columnconfigure(c, weight=1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Application(tk.Frame):\n playButtons = [0] * n\n\n def __init__(self, master=None):\n tk.Frame.__init__(self, master)\n self.grid(sticky='NEWS')\n self.createWidgets()\n\n def show_win(self):\n msg = 'YOU WIN!'\n mb.showinfo('Information', msg)\n self.makePlayButtons()\n\n def move(self, num):\n\n def move2(self=self, num=num):\n index = self.numbers.index(num)\n r = index // 4\n c = index % 4\n if r - 1 >= 0 and self.numbers[4 * (r - 1) + c] == 0:\n self.numbers[4 * (r - 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r - 1) + c]\n self.playButtons[index].grid(row=r - 1 + 1, column=c)\n self.playButtons[4 * (r - 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r - \n 1) + c]\n elif r + 1 <= 3 and self.numbers[4 * (r + 1) + c] == 0:\n self.numbers[4 * (r + 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r + 1) + c]\n self.playButtons[index].grid(row=r + 1 + 1, column=c)\n self.playButtons[4 * (r + 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r + \n 1) + c]\n elif c + 1 <= 3 and self.numbers[4 * r + c + 1] == 0:\n self.numbers[4 * r + c + 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c + 1]\n self.playButtons[index].grid(row=r + 1, column=c + 1)\n self.playButtons[4 * r + c + 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c + 1\n ]\n elif c - 1 >= 0 and self.numbers[4 * r + c - 1] == 0:\n self.numbers[4 * r + c - 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c - 1]\n self.playButtons[index].grid(row=r + 1, column=c - 1)\n self.playButtons[4 * r + c - 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c - 1\n ]\n if self.numbers == [(i % 16) for i in range(1, 17)]:\n self.show_win()\n return move2\n\n def makePlayButtons(self):\n for but in self.playButtons:\n if but != 0:\n but.destroy()\n 
self.numbers = [i for i in range(n)]\n random.shuffle(self.numbers)\n self.playButtons = [0] * n\n for i in range(n):\n if self.numbers[i] == 0:\n continue\n self.playButtons[i] = tk.Button(self, text=str(self.numbers[i]),\n command=self.move(self.numbers[i]))\n self.playButtons[i].grid(row=i // 4 + 1, column=i % 4, sticky=\n 'SENW')\n\n def createWidgets(self):\n self.quitButton = tk.Button(self, text='Exit', command=self.quit)\n self.newButton = tk.Button(self, text='New', command=self.\n makePlayButtons)\n self.makePlayButtons()\n self.quitButton.grid(row=0, column=0)\n self.newButton.grid(row=0, column=2)\n self.master.columnconfigure(0, weight=1)\n self.master.rowconfigure(0, weight=1)\n for r in range(1, 5):\n self.rowconfigure(r, weight=1)\n for c in range(4):\n self.columnconfigure(c, weight=1)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Application(tk.Frame):\n playButtons = [0] * n\n\n def __init__(self, master=None):\n tk.Frame.__init__(self, master)\n self.grid(sticky='NEWS')\n self.createWidgets()\n\n def show_win(self):\n msg = 'YOU WIN!'\n mb.showinfo('Information', msg)\n self.makePlayButtons()\n\n def move(self, num):\n\n def move2(self=self, num=num):\n index = self.numbers.index(num)\n r = index // 4\n c = index % 4\n if r - 1 >= 0 and self.numbers[4 * (r - 1) + c] == 0:\n self.numbers[4 * (r - 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r - 1) + c]\n self.playButtons[index].grid(row=r - 1 + 1, column=c)\n self.playButtons[4 * (r - 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r - \n 1) + c]\n elif r + 1 <= 3 and self.numbers[4 * (r + 1) + c] == 0:\n self.numbers[4 * (r + 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r + 1) + c]\n self.playButtons[index].grid(row=r + 1 + 1, column=c)\n self.playButtons[4 * (r + 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r + \n 1) + c]\n elif c + 1 <= 3 and self.numbers[4 * r + c + 1] == 0:\n self.numbers[4 * r + c + 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c + 1]\n self.playButtons[index].grid(row=r + 1, column=c + 1)\n self.playButtons[4 * r + c + 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c + 1\n ]\n elif c - 1 >= 0 and self.numbers[4 * r + c - 1] == 0:\n self.numbers[4 * r + c - 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c - 1]\n self.playButtons[index].grid(row=r + 1, column=c - 1)\n self.playButtons[4 * r + c - 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c - 1\n ]\n if self.numbers == [(i % 16) for i in range(1, 17)]:\n self.show_win()\n return move2\n\n def makePlayButtons(self):\n for but in self.playButtons:\n if but != 0:\n but.destroy()\n 
self.numbers = [i for i in range(n)]\n random.shuffle(self.numbers)\n self.playButtons = [0] * n\n for i in range(n):\n if self.numbers[i] == 0:\n continue\n self.playButtons[i] = tk.Button(self, text=str(self.numbers[i]),\n command=self.move(self.numbers[i]))\n self.playButtons[i].grid(row=i // 4 + 1, column=i % 4, sticky=\n 'SENW')\n\n def createWidgets(self):\n self.quitButton = tk.Button(self, text='Exit', command=self.quit)\n self.newButton = tk.Button(self, text='New', command=self.\n makePlayButtons)\n self.makePlayButtons()\n self.quitButton.grid(row=0, column=0)\n self.newButton.grid(row=0, column=2)\n self.master.columnconfigure(0, weight=1)\n self.master.rowconfigure(0, weight=1)\n for r in range(1, 5):\n self.rowconfigure(r, weight=1)\n for c in range(4):\n self.columnconfigure(c, weight=1)\n\n\n<mask token>\napp.master.title('15 puzzle')\napp.mainloop()\n",
"step-4": "<mask token>\nn = 16\n\n\nclass Application(tk.Frame):\n playButtons = [0] * n\n\n def __init__(self, master=None):\n tk.Frame.__init__(self, master)\n self.grid(sticky='NEWS')\n self.createWidgets()\n\n def show_win(self):\n msg = 'YOU WIN!'\n mb.showinfo('Information', msg)\n self.makePlayButtons()\n\n def move(self, num):\n\n def move2(self=self, num=num):\n index = self.numbers.index(num)\n r = index // 4\n c = index % 4\n if r - 1 >= 0 and self.numbers[4 * (r - 1) + c] == 0:\n self.numbers[4 * (r - 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r - 1) + c]\n self.playButtons[index].grid(row=r - 1 + 1, column=c)\n self.playButtons[4 * (r - 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r - \n 1) + c]\n elif r + 1 <= 3 and self.numbers[4 * (r + 1) + c] == 0:\n self.numbers[4 * (r + 1) + c], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * (r + 1) + c]\n self.playButtons[index].grid(row=r + 1 + 1, column=c)\n self.playButtons[4 * (r + 1) + c], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * (r + \n 1) + c]\n elif c + 1 <= 3 and self.numbers[4 * r + c + 1] == 0:\n self.numbers[4 * r + c + 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c + 1]\n self.playButtons[index].grid(row=r + 1, column=c + 1)\n self.playButtons[4 * r + c + 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c + 1\n ]\n elif c - 1 >= 0 and self.numbers[4 * r + c - 1] == 0:\n self.numbers[4 * r + c - 1], self.numbers[index\n ] = self.numbers[index], self.numbers[4 * r + c - 1]\n self.playButtons[index].grid(row=r + 1, column=c - 1)\n self.playButtons[4 * r + c - 1], self.playButtons[index\n ] = self.playButtons[index], self.playButtons[4 * r + c - 1\n ]\n if self.numbers == [(i % 16) for i in range(1, 17)]:\n self.show_win()\n return move2\n\n def makePlayButtons(self):\n for but in self.playButtons:\n if but != 0:\n 
but.destroy()\n self.numbers = [i for i in range(n)]\n random.shuffle(self.numbers)\n self.playButtons = [0] * n\n for i in range(n):\n if self.numbers[i] == 0:\n continue\n self.playButtons[i] = tk.Button(self, text=str(self.numbers[i]),\n command=self.move(self.numbers[i]))\n self.playButtons[i].grid(row=i // 4 + 1, column=i % 4, sticky=\n 'SENW')\n\n def createWidgets(self):\n self.quitButton = tk.Button(self, text='Exit', command=self.quit)\n self.newButton = tk.Button(self, text='New', command=self.\n makePlayButtons)\n self.makePlayButtons()\n self.quitButton.grid(row=0, column=0)\n self.newButton.grid(row=0, column=2)\n self.master.columnconfigure(0, weight=1)\n self.master.rowconfigure(0, weight=1)\n for r in range(1, 5):\n self.rowconfigure(r, weight=1)\n for c in range(4):\n self.columnconfigure(c, weight=1)\n\n\napp = Application()\napp.master.title('15 puzzle')\napp.mainloop()\n",
"step-5": "import tkinter as tk\nimport random\nfrom tkinter import messagebox as mb\n\nn = 16\n\n\nclass Application(tk.Frame):\n playButtons = [0] * n\n def __init__(self, master=None):\n tk.Frame.__init__(self, master)\n self.grid(sticky='NEWS')\n self.createWidgets()\n\n def show_win(self):\n msg = \"YOU WIN!\"\n mb.showinfo(\"Information\", msg)\n self.makePlayButtons()\n\n def move(self, num): \n def move2(self=self, num=num):\n index = self.numbers.index(num)\n r = index // 4\n c = index % 4\n if r - 1 >= 0 and self.numbers[4 * (r - 1) + c] == 0:\n self.numbers[4 * (r - 1) + c], self.numbers[index] = self.numbers[index], self.numbers[4 * (r - 1) + c]\n self.playButtons[index].grid(row=r - 1 + 1, column=c)\n self.playButtons[4 * (r - 1) + c], self.playButtons[index] = self.playButtons[index], self.playButtons[4 * (r - 1) + c]\n\n elif r + 1 <= 3 and self.numbers[4 * (r + 1) + c] == 0:\n self.numbers[4 * (r + 1) + c], self.numbers[index] = self.numbers[index], self.numbers[4 * (r + 1) + c]\n self.playButtons[index].grid(row=r + 1 + 1, column=c)\n self.playButtons[4 * (r + 1) + c], self.playButtons[index] = self.playButtons[index], self.playButtons[4 * (r + 1) + c]\n elif c + 1 <= 3 and self.numbers[4 * r + c + 1] == 0:\n self.numbers[4 * r + c + 1], self.numbers[index] = self.numbers[index], self.numbers[4 * r + c + 1]\n self.playButtons[index].grid(row=r + 1, column=c + 1)\n self.playButtons[4 * r + c + 1], self.playButtons[index] = self.playButtons[index], self.playButtons[4 * r + c + 1]\n elif c - 1 >= 0 and self.numbers[4 * r + c - 1] == 0:\n self.numbers[4 * r + c - 1], self.numbers[index] = self.numbers[index], self.numbers[4 * r + c - 1]\n self.playButtons[index].grid(row=r + 1, column=c - 1)\n self.playButtons[4 * r + c - 1], self.playButtons[index] = self.playButtons[index], self.playButtons[4 * r + c - 1]\n if self.numbers == [i % 16 for i in range(1, 17)]:\n self.show_win()\n return move2 \n\n def makePlayButtons(self):\n for but in 
self.playButtons:\n if but != 0:\n but.destroy()\n\n self.numbers = [i for i in range(n)]\n random.shuffle(self.numbers)\n\n self.playButtons = [0] * n\n for i in range(n):\n if self.numbers[i] == 0:\n continue\n self.playButtons[i] = tk.Button(self, text=str(self.numbers[i]), command=self.move(self.numbers[i]))\n self.playButtons[i].grid(row=i // 4 + 1, column=i % 4, sticky='SENW')\n\n\n\n def createWidgets(self):\n \n\n self.quitButton = tk.Button(self, text='Exit', command=self.quit)\n self.newButton = tk.Button(self, text='New', command=self.makePlayButtons)\n\n self.makePlayButtons()\n\n self.quitButton.grid(row=0, column=0)\n self.newButton.grid(row=0, column=2)\n\n self.master.columnconfigure(0, weight = 1)\n self.master.rowconfigure(0, weight = 1)\n\n for r in range(1, 5):\n self.rowconfigure(r, weight = 1)\n for c in range(4):\n self.columnconfigure(c, weight = 1)\n #self.show_win()\n\napp = Application()\napp.master.title('15 puzzle')\napp.mainloop()",
"step-ids": [
5,
7,
8,
9,
11
]
}
|
[
5,
7,
8,
9,
11
] |
# Generated by Django 3.1 on 2020-09-26 03:46
import datetime
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('bcs', '0002_auto_20200915_2245'),
]
operations = [
migrations.AddField(
model_name='study_material',
name='study_materail_date',
field=models.DateField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AlterField(
model_name='exam',
name='exam_date',
field=models.DateField(blank=True, default=datetime.date(2020, 9, 26)),
),
migrations.AlterField(
model_name='exam',
name='exam_time',
field=models.IntegerField(default=10),
),
migrations.AlterField(
model_name='study_material',
name='study_image',
field=models.ImageField(blank=True, null=True, upload_to='images/'),
),
]
|
normal
|
{
"blob_id": "61484d9a08f2e3fcd15573ce89be4118a442dc2e",
"index": 6062,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('bcs', '0002_auto_20200915_2245')]\n operations = [migrations.AddField(model_name='study_material', name=\n 'study_materail_date', field=models.DateField(auto_now_add=True,\n default=django.utils.timezone.now), preserve_default=False),\n migrations.AlterField(model_name='exam', name='exam_date', field=\n models.DateField(blank=True, default=datetime.date(2020, 9, 26))),\n migrations.AlterField(model_name='exam', name='exam_time', field=\n models.IntegerField(default=10)), migrations.AlterField(model_name=\n 'study_material', name='study_image', field=models.ImageField(blank\n =True, null=True, upload_to='images/'))]\n",
"step-4": "import datetime\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n dependencies = [('bcs', '0002_auto_20200915_2245')]\n operations = [migrations.AddField(model_name='study_material', name=\n 'study_materail_date', field=models.DateField(auto_now_add=True,\n default=django.utils.timezone.now), preserve_default=False),\n migrations.AlterField(model_name='exam', name='exam_date', field=\n models.DateField(blank=True, default=datetime.date(2020, 9, 26))),\n migrations.AlterField(model_name='exam', name='exam_time', field=\n models.IntegerField(default=10)), migrations.AlterField(model_name=\n 'study_material', name='study_image', field=models.ImageField(blank\n =True, null=True, upload_to='images/'))]\n",
"step-5": "# Generated by Django 3.1 on 2020-09-26 03:46\n\nimport datetime\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('bcs', '0002_auto_20200915_2245'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='study_material',\n name='study_materail_date',\n field=models.DateField(auto_now_add=True, default=django.utils.timezone.now),\n preserve_default=False,\n ),\n migrations.AlterField(\n model_name='exam',\n name='exam_date',\n field=models.DateField(blank=True, default=datetime.date(2020, 9, 26)),\n ),\n migrations.AlterField(\n model_name='exam',\n name='exam_time',\n field=models.IntegerField(default=10),\n ),\n migrations.AlterField(\n model_name='study_material',\n name='study_image',\n field=models.ImageField(blank=True, null=True, upload_to='images/'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class _CORPORALS(_CORPORAL):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class _CORPORALS(_CORPORAL):
def __init__(self):
_CORPORAL.__init__(self)
self.name = 'CORPORALS'
self.specie = 'adjectives'
self.basic = 'corporal'
self.jsondata = {}
<|reserved_special_token_1|>
from xai.brain.wordbase.adjectives._corporal import _CORPORAL
class _CORPORALS(_CORPORAL):
def __init__(self):
_CORPORAL.__init__(self)
self.name = 'CORPORALS'
self.specie = 'adjectives'
self.basic = 'corporal'
self.jsondata = {}
<|reserved_special_token_1|>
from xai.brain.wordbase.adjectives._corporal import _CORPORAL
#calss header
class _CORPORALS(_CORPORAL, ):
def __init__(self,):
_CORPORAL.__init__(self)
self.name = "CORPORALS"
self.specie = 'adjectives'
self.basic = "corporal"
self.jsondata = {}
|
flexible
|
{
"blob_id": "d2787f17a46cf0db9aeea82f1b97ee8d630fd28a",
"index": 8932,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass _CORPORALS(_CORPORAL):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass _CORPORALS(_CORPORAL):\n\n def __init__(self):\n _CORPORAL.__init__(self)\n self.name = 'CORPORALS'\n self.specie = 'adjectives'\n self.basic = 'corporal'\n self.jsondata = {}\n",
"step-4": "from xai.brain.wordbase.adjectives._corporal import _CORPORAL\n\n\nclass _CORPORALS(_CORPORAL):\n\n def __init__(self):\n _CORPORAL.__init__(self)\n self.name = 'CORPORALS'\n self.specie = 'adjectives'\n self.basic = 'corporal'\n self.jsondata = {}\n",
"step-5": "\n\nfrom xai.brain.wordbase.adjectives._corporal import _CORPORAL\n\n#calss header\nclass _CORPORALS(_CORPORAL, ):\n\tdef __init__(self,): \n\t\t_CORPORAL.__init__(self)\n\t\tself.name = \"CORPORALS\"\n\t\tself.specie = 'adjectives'\n\t\tself.basic = \"corporal\"\n\t\tself.jsondata = {}\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
conf = open('../conf/linked.data.gov.au-vocabularies.conf')
new = ['anzsrc-for', 'anzsrc-seo', 'ausplots-cv',
'australian-phone-area-codes', 'care', 'corveg-cv', 'nrm',
'reg-roles', 'reg-statuses', 'address-type',
'australian-states-and-territories', 'bc-labels',
'data-access-rights', 'dataciteroles', 'depth-reference',
'geo-commodities', 'geoadminfeatures', 'geofeatures',
'geological-observation-instrument',
'geological-observation-method', 'geological-observation-type',
'geological-sites', 'geometry-roles', 'georesource-report',
'gsq-alias', 'gsq-dataset-theme', 'gsq-roles',
'gsq-sample-facility', 'iso639-1',
'iso-19157-data-quality-dimension',
'iso-iec-25012-data-quality-dimension', 'nsw-quality-dimension',
'party-identifier-type', 'qg-agent', 'qg-file-types',
'qg-security-classifications', 'qg-sites', 'qld-data-licenses',
'iso19115-1/RoleCode', 'minerals', 'nslvoc',
'observation-detail-type', 'organisation-activity-status',
'organisation-name-types', 'organisation-type',
'party-relationship', 'queensland-crs',
'qld-resource-permit-status', 'qld-resource-permit',
'qld-utm-zones', 'geou', 'iso11179-6/RolesAndResponsibilities',
'qesd-qkd', 'qesd-uom', 'qld-obsprop', 'report-detail-type',
'report-status', 'resource-project-lifecycle', 'resource-types',
'result-type', 'sample-detail-type', 'sample-location-status',
'sample-location-types', 'sample-material',
'sample-preparation-methods', 'sample-relationship', 'sample-type',
'seismic-dimensionality', 'site-detail-type', 'site-relationships',
'site-status', 'supermodel/terms', 'survey-detail-type',
'survey-method', 'survey-relationship-type', 'survey-status',
'survey-type', 'telephone-type', 'tk-labels', 'trs']
lines = conf.readlines()
for n in new:
for line in lines:
if n in line:
pattern, match = line.split('$', 1)
print(pattern.strip().replace('RewriteRule ^',
'https://linked.data.gov.au/'), ' -- ', match.split(
'[R')[0].replace('"', '').strip())
break
<|reserved_special_token_1|>
import tests.functions as functions
if __name__ == '__main__':
conf = open('../conf/linked.data.gov.au-vocabularies.conf')
new = ['anzsrc-for', 'anzsrc-seo', 'ausplots-cv',
'australian-phone-area-codes', 'care', 'corveg-cv', 'nrm',
'reg-roles', 'reg-statuses', 'address-type',
'australian-states-and-territories', 'bc-labels',
'data-access-rights', 'dataciteroles', 'depth-reference',
'geo-commodities', 'geoadminfeatures', 'geofeatures',
'geological-observation-instrument',
'geological-observation-method', 'geological-observation-type',
'geological-sites', 'geometry-roles', 'georesource-report',
'gsq-alias', 'gsq-dataset-theme', 'gsq-roles',
'gsq-sample-facility', 'iso639-1',
'iso-19157-data-quality-dimension',
'iso-iec-25012-data-quality-dimension', 'nsw-quality-dimension',
'party-identifier-type', 'qg-agent', 'qg-file-types',
'qg-security-classifications', 'qg-sites', 'qld-data-licenses',
'iso19115-1/RoleCode', 'minerals', 'nslvoc',
'observation-detail-type', 'organisation-activity-status',
'organisation-name-types', 'organisation-type',
'party-relationship', 'queensland-crs',
'qld-resource-permit-status', 'qld-resource-permit',
'qld-utm-zones', 'geou', 'iso11179-6/RolesAndResponsibilities',
'qesd-qkd', 'qesd-uom', 'qld-obsprop', 'report-detail-type',
'report-status', 'resource-project-lifecycle', 'resource-types',
'result-type', 'sample-detail-type', 'sample-location-status',
'sample-location-types', 'sample-material',
'sample-preparation-methods', 'sample-relationship', 'sample-type',
'seismic-dimensionality', 'site-detail-type', 'site-relationships',
'site-status', 'supermodel/terms', 'survey-detail-type',
'survey-method', 'survey-relationship-type', 'survey-status',
'survey-type', 'telephone-type', 'tk-labels', 'trs']
lines = conf.readlines()
for n in new:
for line in lines:
if n in line:
pattern, match = line.split('$', 1)
print(pattern.strip().replace('RewriteRule ^',
'https://linked.data.gov.au/'), ' -- ', match.split(
'[R')[0].replace('"', '').strip())
break
<|reserved_special_token_1|>
import tests.functions as functions
if __name__ == "__main__":
# functions.validate_all_redirects("linked.data.gov.au-vocabularies.json")
conf = open("../conf/linked.data.gov.au-vocabularies.conf")
new = [
"anzsrc-for",
"anzsrc-seo",
"ausplots-cv",
"australian-phone-area-codes",
"care",
"corveg-cv",
"nrm",
"reg-roles",
"reg-statuses",
"address-type",
"australian-states-and-territories",
"bc-labels",
"data-access-rights",
"dataciteroles",
"depth-reference",
"geo-commodities",
"geoadminfeatures",
"geofeatures",
"geological-observation-instrument",
"geological-observation-method",
"geological-observation-type",
"geological-sites",
"geometry-roles",
"georesource-report",
"gsq-alias",
"gsq-dataset-theme",
"gsq-roles",
"gsq-sample-facility",
"iso639-1",
"iso-19157-data-quality-dimension",
"iso-iec-25012-data-quality-dimension",
"nsw-quality-dimension",
"party-identifier-type",
"qg-agent",
"qg-file-types",
"qg-security-classifications",
"qg-sites",
"qld-data-licenses",
"iso19115-1/RoleCode",
"minerals",
"nslvoc",
"observation-detail-type",
"organisation-activity-status",
"organisation-name-types",
"organisation-type",
"party-relationship",
"queensland-crs",
"qld-resource-permit-status",
"qld-resource-permit",
"qld-utm-zones",
"geou",
"iso11179-6/RolesAndResponsibilities",
"qesd-qkd",
"qesd-uom",
"qld-obsprop",
"report-detail-type",
"report-status",
"resource-project-lifecycle",
"resource-types",
"result-type",
"sample-detail-type",
"sample-location-status",
"sample-location-types",
"sample-material",
"sample-preparation-methods",
"sample-relationship",
"sample-type",
"seismic-dimensionality",
"site-detail-type",
"site-relationships",
"site-status",
"supermodel/terms",
"survey-detail-type",
"survey-method",
"survey-relationship-type",
"survey-status",
"survey-type",
"telephone-type",
"tk-labels",
"trs"
]
lines = conf.readlines()
for n in new:
for line in lines:
if n in line:
pattern, match = line.split("$", 1)
print(pattern.strip().replace("RewriteRule ^", "https://linked.data.gov.au/"), " -- ", match.split("[R")[0].replace('"', '').strip())
break
|
flexible
|
{
"blob_id": "4a620957b2cd1e5945d98e49a5eae5d5592ef5a2",
"index": 3911,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n conf = open('../conf/linked.data.gov.au-vocabularies.conf')\n new = ['anzsrc-for', 'anzsrc-seo', 'ausplots-cv',\n 'australian-phone-area-codes', 'care', 'corveg-cv', 'nrm',\n 'reg-roles', 'reg-statuses', 'address-type',\n 'australian-states-and-territories', 'bc-labels',\n 'data-access-rights', 'dataciteroles', 'depth-reference',\n 'geo-commodities', 'geoadminfeatures', 'geofeatures',\n 'geological-observation-instrument',\n 'geological-observation-method', 'geological-observation-type',\n 'geological-sites', 'geometry-roles', 'georesource-report',\n 'gsq-alias', 'gsq-dataset-theme', 'gsq-roles',\n 'gsq-sample-facility', 'iso639-1',\n 'iso-19157-data-quality-dimension',\n 'iso-iec-25012-data-quality-dimension', 'nsw-quality-dimension',\n 'party-identifier-type', 'qg-agent', 'qg-file-types',\n 'qg-security-classifications', 'qg-sites', 'qld-data-licenses',\n 'iso19115-1/RoleCode', 'minerals', 'nslvoc',\n 'observation-detail-type', 'organisation-activity-status',\n 'organisation-name-types', 'organisation-type',\n 'party-relationship', 'queensland-crs',\n 'qld-resource-permit-status', 'qld-resource-permit',\n 'qld-utm-zones', 'geou', 'iso11179-6/RolesAndResponsibilities',\n 'qesd-qkd', 'qesd-uom', 'qld-obsprop', 'report-detail-type',\n 'report-status', 'resource-project-lifecycle', 'resource-types',\n 'result-type', 'sample-detail-type', 'sample-location-status',\n 'sample-location-types', 'sample-material',\n 'sample-preparation-methods', 'sample-relationship', 'sample-type',\n 'seismic-dimensionality', 'site-detail-type', 'site-relationships',\n 'site-status', 'supermodel/terms', 'survey-detail-type',\n 'survey-method', 'survey-relationship-type', 'survey-status',\n 'survey-type', 'telephone-type', 'tk-labels', 'trs']\n lines = conf.readlines()\n for n in new:\n for line in lines:\n if n in line:\n pattern, match = line.split('$', 1)\n print(pattern.strip().replace('RewriteRule ^',\n 
'https://linked.data.gov.au/'), ' -- ', match.split(\n '[R')[0].replace('\"', '').strip())\n break\n",
"step-3": "import tests.functions as functions\nif __name__ == '__main__':\n conf = open('../conf/linked.data.gov.au-vocabularies.conf')\n new = ['anzsrc-for', 'anzsrc-seo', 'ausplots-cv',\n 'australian-phone-area-codes', 'care', 'corveg-cv', 'nrm',\n 'reg-roles', 'reg-statuses', 'address-type',\n 'australian-states-and-territories', 'bc-labels',\n 'data-access-rights', 'dataciteroles', 'depth-reference',\n 'geo-commodities', 'geoadminfeatures', 'geofeatures',\n 'geological-observation-instrument',\n 'geological-observation-method', 'geological-observation-type',\n 'geological-sites', 'geometry-roles', 'georesource-report',\n 'gsq-alias', 'gsq-dataset-theme', 'gsq-roles',\n 'gsq-sample-facility', 'iso639-1',\n 'iso-19157-data-quality-dimension',\n 'iso-iec-25012-data-quality-dimension', 'nsw-quality-dimension',\n 'party-identifier-type', 'qg-agent', 'qg-file-types',\n 'qg-security-classifications', 'qg-sites', 'qld-data-licenses',\n 'iso19115-1/RoleCode', 'minerals', 'nslvoc',\n 'observation-detail-type', 'organisation-activity-status',\n 'organisation-name-types', 'organisation-type',\n 'party-relationship', 'queensland-crs',\n 'qld-resource-permit-status', 'qld-resource-permit',\n 'qld-utm-zones', 'geou', 'iso11179-6/RolesAndResponsibilities',\n 'qesd-qkd', 'qesd-uom', 'qld-obsprop', 'report-detail-type',\n 'report-status', 'resource-project-lifecycle', 'resource-types',\n 'result-type', 'sample-detail-type', 'sample-location-status',\n 'sample-location-types', 'sample-material',\n 'sample-preparation-methods', 'sample-relationship', 'sample-type',\n 'seismic-dimensionality', 'site-detail-type', 'site-relationships',\n 'site-status', 'supermodel/terms', 'survey-detail-type',\n 'survey-method', 'survey-relationship-type', 'survey-status',\n 'survey-type', 'telephone-type', 'tk-labels', 'trs']\n lines = conf.readlines()\n for n in new:\n for line in lines:\n if n in line:\n pattern, match = line.split('$', 1)\n print(pattern.strip().replace('RewriteRule ^',\n 
'https://linked.data.gov.au/'), ' -- ', match.split(\n '[R')[0].replace('\"', '').strip())\n break\n",
"step-4": "import tests.functions as functions\n\nif __name__ == \"__main__\":\n # functions.validate_all_redirects(\"linked.data.gov.au-vocabularies.json\")\n\n conf = open(\"../conf/linked.data.gov.au-vocabularies.conf\")\n new = [\n \"anzsrc-for\",\n \"anzsrc-seo\",\n \"ausplots-cv\",\n \"australian-phone-area-codes\",\n \"care\",\n \"corveg-cv\",\n \"nrm\",\n \"reg-roles\",\n \"reg-statuses\",\n \"address-type\",\n \"australian-states-and-territories\",\n \"bc-labels\",\n \"data-access-rights\",\n \"dataciteroles\",\n \"depth-reference\",\n \"geo-commodities\",\n \"geoadminfeatures\",\n \"geofeatures\",\n \"geological-observation-instrument\",\n \"geological-observation-method\",\n \"geological-observation-type\",\n \"geological-sites\",\n \"geometry-roles\",\n \"georesource-report\",\n \"gsq-alias\",\n \"gsq-dataset-theme\",\n \"gsq-roles\",\n \"gsq-sample-facility\",\n \"iso639-1\",\n \"iso-19157-data-quality-dimension\",\n \"iso-iec-25012-data-quality-dimension\",\n \"nsw-quality-dimension\",\n \"party-identifier-type\",\n \"qg-agent\",\n \"qg-file-types\",\n \"qg-security-classifications\",\n \"qg-sites\",\n \"qld-data-licenses\",\n \"iso19115-1/RoleCode\",\n \"minerals\",\n \"nslvoc\",\n \"observation-detail-type\",\n \"organisation-activity-status\",\n \"organisation-name-types\",\n \"organisation-type\",\n \"party-relationship\",\n \"queensland-crs\",\n \"qld-resource-permit-status\",\n \"qld-resource-permit\",\n \"qld-utm-zones\",\n \"geou\",\n \"iso11179-6/RolesAndResponsibilities\",\n \"qesd-qkd\",\n \"qesd-uom\",\n \"qld-obsprop\",\n \"report-detail-type\",\n \"report-status\",\n \"resource-project-lifecycle\",\n \"resource-types\",\n \"result-type\",\n \"sample-detail-type\",\n \"sample-location-status\",\n \"sample-location-types\",\n \"sample-material\",\n \"sample-preparation-methods\",\n \"sample-relationship\",\n \"sample-type\",\n \"seismic-dimensionality\",\n \"site-detail-type\",\n \"site-relationships\",\n \"site-status\",\n 
\"supermodel/terms\",\n \"survey-detail-type\",\n \"survey-method\",\n \"survey-relationship-type\",\n \"survey-status\",\n \"survey-type\",\n \"telephone-type\",\n \"tk-labels\",\n \"trs\"\n ]\n lines = conf.readlines()\n\n for n in new:\n for line in lines:\n if n in line:\n pattern, match = line.split(\"$\", 1)\n print(pattern.strip().replace(\"RewriteRule ^\", \"https://linked.data.gov.au/\"), \" -- \", match.split(\"[R\")[0].replace('\"', '').strip())\n break",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def convert_type(data_value):
try:
return int(data_value)
except ValueError:
try:
return float(data_value)
except ValueError:
return data_value
<|reserved_special_token_0|>
def get_delim(sourcefile1):
print('> executing get_delim function')
data = open(sourcefile1, 'r')
my_read_data = data.read()
if my_read_data.find(',') > 0:
print(' delimiter: comma')
return ','
else:
print(' delimiter: space')
return ' '
print(' ')
def lines_to_dict(lines, header=False):
print('> executing lines_to_dict')
column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',
'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']
data_dict = {}
for idx, column in enumerate(column_titles):
data_dict[column] = []
for row in lines:
data_dict[column] += [row[idx]]
return data_dict
<|reserved_special_token_0|>
def plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False
):
df = pd.DataFrame.from_dict(dd)
x = np.fromiter(dd[col1], dtype=float)
y1 = np.fromiter(dd[col2a], dtype=float)
y2 = np.fromiter(dd[col2b], dtype=float)
fig, ax1 = plt.subplots()
plt.title(label1 + ' by ' + label2a + ' and ' + label2b)
clra = 'indigo'
ax1.set_xlabel(label1)
ax1.set_ylabel(label2a, color=clra)
ax1.scatter(df[col1], df[col2a], color=clra, marker='^')
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y1, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clra)
ax1.tick_params(axis='y', labelcolor=clra)
ax2 = ax1.twinx()
clrb = 'darkgreen'
ax2.set_ylabel(label2b, color=clrb)
ax2.scatter(df[col1], df[col2b], color=clrb)
ax2.tick_params(axis='y', labelcolor=clrb)
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y2, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clrb)
ax1.tick_params(axis='y', labelcolor=clra)
fig.tight_layout()
plt.savefig('an2_colour' + n + '.png')
plt.show()
def main():
data_file = 'wine.data'
dlm = get_delim(data_file)
my_data = parse_file(data_file, dlm)
data_dictionary = lines_to_dict(my_data)
plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',
'Alcohol', 'Flavonoids', '1')
plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',
'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def convert_type(data_value):
try:
return int(data_value)
except ValueError:
try:
return float(data_value)
except ValueError:
return data_value
<|reserved_special_token_0|>
def get_delim(sourcefile1):
print('> executing get_delim function')
data = open(sourcefile1, 'r')
my_read_data = data.read()
if my_read_data.find(',') > 0:
print(' delimiter: comma')
return ','
else:
print(' delimiter: space')
return ' '
print(' ')
def lines_to_dict(lines, header=False):
print('> executing lines_to_dict')
column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',
'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']
data_dict = {}
for idx, column in enumerate(column_titles):
data_dict[column] = []
for row in lines:
data_dict[column] += [row[idx]]
return data_dict
def parse_file(data_file, dlm, debug=False):
print('> executing parse_file')
assert op.isfile(data_file)
with open(data_file, 'r') as fhandle:
csv_reader = csv.reader(fhandle, delimiter=dlm)
lines = []
if debug:
count = 0
for line in csv_reader:
if debug:
if count > 2:
break
count += 1
newline = []
for value in line:
newline += [convert_type(value)]
if len(newline) > 0:
lines += [newline]
print('> view a few lines')
print(' ')
for line in lines[0:2]:
print(line)
print(' ')
return lines
def plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False
):
df = pd.DataFrame.from_dict(dd)
x = np.fromiter(dd[col1], dtype=float)
y1 = np.fromiter(dd[col2a], dtype=float)
y2 = np.fromiter(dd[col2b], dtype=float)
fig, ax1 = plt.subplots()
plt.title(label1 + ' by ' + label2a + ' and ' + label2b)
clra = 'indigo'
ax1.set_xlabel(label1)
ax1.set_ylabel(label2a, color=clra)
ax1.scatter(df[col1], df[col2a], color=clra, marker='^')
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y1, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clra)
ax1.tick_params(axis='y', labelcolor=clra)
ax2 = ax1.twinx()
clrb = 'darkgreen'
ax2.set_ylabel(label2b, color=clrb)
ax2.scatter(df[col1], df[col2b], color=clrb)
ax2.tick_params(axis='y', labelcolor=clrb)
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y2, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clrb)
ax1.tick_params(axis='y', labelcolor=clra)
fig.tight_layout()
plt.savefig('an2_colour' + n + '.png')
plt.show()
def main():
data_file = 'wine.data'
dlm = get_delim(data_file)
my_data = parse_file(data_file, dlm)
data_dictionary = lines_to_dict(my_data)
plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',
'Alcohol', 'Flavonoids', '1')
plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',
'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print(
'========================================================================================'
)
print(
'========================================================================================'
)
print('> start of program an2_colour.py')
print('> import libraries')
<|reserved_special_token_0|>
print('> define convert_type function')
def convert_type(data_value):
try:
return int(data_value)
except ValueError:
try:
return float(data_value)
except ValueError:
return data_value
print('> define get_delim function')
def get_delim(sourcefile1):
print('> executing get_delim function')
data = open(sourcefile1, 'r')
my_read_data = data.read()
if my_read_data.find(',') > 0:
print(' delimiter: comma')
return ','
else:
print(' delimiter: space')
return ' '
print(' ')
def lines_to_dict(lines, header=False):
print('> executing lines_to_dict')
column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',
'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']
data_dict = {}
for idx, column in enumerate(column_titles):
data_dict[column] = []
for row in lines:
data_dict[column] += [row[idx]]
return data_dict
def parse_file(data_file, dlm, debug=False):
print('> executing parse_file')
assert op.isfile(data_file)
with open(data_file, 'r') as fhandle:
csv_reader = csv.reader(fhandle, delimiter=dlm)
lines = []
if debug:
count = 0
for line in csv_reader:
if debug:
if count > 2:
break
count += 1
newline = []
for value in line:
newline += [convert_type(value)]
if len(newline) > 0:
lines += [newline]
print('> view a few lines')
print(' ')
for line in lines[0:2]:
print(line)
print(' ')
return lines
def plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False
):
df = pd.DataFrame.from_dict(dd)
x = np.fromiter(dd[col1], dtype=float)
y1 = np.fromiter(dd[col2a], dtype=float)
y2 = np.fromiter(dd[col2b], dtype=float)
fig, ax1 = plt.subplots()
plt.title(label1 + ' by ' + label2a + ' and ' + label2b)
clra = 'indigo'
ax1.set_xlabel(label1)
ax1.set_ylabel(label2a, color=clra)
ax1.scatter(df[col1], df[col2a], color=clra, marker='^')
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y1, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clra)
ax1.tick_params(axis='y', labelcolor=clra)
ax2 = ax1.twinx()
clrb = 'darkgreen'
ax2.set_ylabel(label2b, color=clrb)
ax2.scatter(df[col1], df[col2b], color=clrb)
ax2.tick_params(axis='y', labelcolor=clrb)
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y2, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clrb)
ax1.tick_params(axis='y', labelcolor=clra)
fig.tight_layout()
plt.savefig('an2_colour' + n + '.png')
plt.show()
def main():
data_file = 'wine.data'
dlm = get_delim(data_file)
my_data = parse_file(data_file, dlm)
data_dictionary = lines_to_dict(my_data)
plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',
'Alcohol', 'Flavonoids', '1')
plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',
'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
print(
'========================================================================================'
)
print(
'========================================================================================'
)
print('> start of program an2_colour.py')
print('> import libraries')
import argparse
import os.path as op
import csv
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from numpy.polynomial.polynomial import polyfit
print('> define convert_type function')
def convert_type(data_value):
try:
return int(data_value)
except ValueError:
try:
return float(data_value)
except ValueError:
return data_value
print('> define get_delim function')
def get_delim(sourcefile1):
print('> executing get_delim function')
data = open(sourcefile1, 'r')
my_read_data = data.read()
if my_read_data.find(',') > 0:
print(' delimiter: comma')
return ','
else:
print(' delimiter: space')
return ' '
print(' ')
def lines_to_dict(lines, header=False):
print('> executing lines_to_dict')
column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',
'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']
data_dict = {}
for idx, column in enumerate(column_titles):
data_dict[column] = []
for row in lines:
data_dict[column] += [row[idx]]
return data_dict
def parse_file(data_file, dlm, debug=False):
print('> executing parse_file')
assert op.isfile(data_file)
with open(data_file, 'r') as fhandle:
csv_reader = csv.reader(fhandle, delimiter=dlm)
lines = []
if debug:
count = 0
for line in csv_reader:
if debug:
if count > 2:
break
count += 1
newline = []
for value in line:
newline += [convert_type(value)]
if len(newline) > 0:
lines += [newline]
print('> view a few lines')
print(' ')
for line in lines[0:2]:
print(line)
print(' ')
return lines
def plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False
):
df = pd.DataFrame.from_dict(dd)
x = np.fromiter(dd[col1], dtype=float)
y1 = np.fromiter(dd[col2a], dtype=float)
y2 = np.fromiter(dd[col2b], dtype=float)
fig, ax1 = plt.subplots()
plt.title(label1 + ' by ' + label2a + ' and ' + label2b)
clra = 'indigo'
ax1.set_xlabel(label1)
ax1.set_ylabel(label2a, color=clra)
ax1.scatter(df[col1], df[col2a], color=clra, marker='^')
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y1, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clra)
ax1.tick_params(axis='y', labelcolor=clra)
ax2 = ax1.twinx()
clrb = 'darkgreen'
ax2.set_ylabel(label2b, color=clrb)
ax2.scatter(df[col1], df[col2b], color=clrb)
ax2.tick_params(axis='y', labelcolor=clrb)
xp = np.linspace(np.amin(x), np.amax(x), 100)
weights = np.polyfit(x, y2, 1)
model = np.poly1d(weights)
plt.plot(xp, model(xp), '-', c=clrb)
ax1.tick_params(axis='y', labelcolor=clra)
fig.tight_layout()
plt.savefig('an2_colour' + n + '.png')
plt.show()
def main():
data_file = 'wine.data'
dlm = get_delim(data_file)
my_data = parse_file(data_file, dlm)
data_dictionary = lines_to_dict(my_data)
plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',
'Alcohol', 'Flavonoids', '1')
plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',
'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
# program name: an2_colour.py
# no optional arguments: Uses Wine data to display information about the relationship of
# various attributes with colour and hue
print('========================================================================================')
print('========================================================================================')
print('> start of program an2_colour.py')
print('> import libraries')
import argparse
import os.path as op
import csv
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from numpy.polynomial.polynomial import polyfit
print('> define convert_type function')
def convert_type(data_value):
try:
return int(data_value)
except ValueError:
try:
return float(data_value)
except ValueError:
return data_value
print("> define get_delim function")
def get_delim(sourcefile1):
print('> executing get_delim function')
data = open(sourcefile1, 'r')
my_read_data = data.read()
if my_read_data.find(',') > 0:
print(' delimiter: comma')
return ','
else:
print(' delimiter: space')
return ' '
print(' ')
def lines_to_dict(lines, header=False):
print('> executing lines_to_dict')
# column_titles = ['Class','Alcohol','Malic acid','Ash','Alcalinity of ash','Magnesium','Total phenols','Flavanoids','Nonflavanoid phenols','Proanthocyanins','Color intensity','Hue',
# 'OD280/OD315 of diluted wines','Proline']
column_titles = ['class','alc','ma','ash','alkash','mg','tphen','flav','nfphen','pac','ci','hue',
'od','proline']
data_dict = {}
for idx, column in enumerate(column_titles):
data_dict[column] = []
for row in lines:
data_dict[column] += [row[idx]]
return data_dict
def parse_file(data_file, dlm, debug=False): # took delimiter out
print('> executing parse_file')
# Verify the file exists
assert(op.isfile(data_file))
# open it as a csv
with open(data_file, 'r') as fhandle:
csv_reader = csv.reader(fhandle, delimiter=dlm)
# Add each line in the file to a list
lines = []
if debug:
count = 0
for line in csv_reader:
if debug:
if count > 2:
break
count += 1
newline = []
for value in line:
newline += [convert_type(value)]
if len(newline) > 0:
lines += [newline]
print('> view a few lines')
print(' ')
for line in lines[0:2]:
print(line)
print(' ')
# Return all the contents of our file
return lines
# class','alc','ma','ash','alkash','mg','tphen','flav','nfphen','pac','ci','hue',
# 'od','proline
def plot_data3(dd, col1, label1,
               col2a, col2b,
               label2a, label2b, n,
               debug=False):
    """Scatter col2a (left axis) and col2b (right axis) against col1,
    overlay a least-squares trend line for each, and save the figure
    as an2_colour<n>.png before showing it.

    dd       -- column dictionary as produced by lines_to_dict()
    col1     -- x-axis column key; label1 is its display name
    col2a/b  -- y-axis column keys; label2a/b are their display names
    n        -- string suffix for the output file name
    """
    df = pd.DataFrame.from_dict(dd)
    x = np.fromiter(dd[col1], dtype=float)  # numpy arrays needed for polyfit
    y1 = np.fromiter(dd[col2a], dtype=float)
    y2 = np.fromiter(dd[col2b], dtype=float)

    def add_trend(y, colour):
        # Shared trend-line code (was duplicated for each axis): fit a
        # degree-1 polynomial and draw it across the full x range.
        xp = np.linspace(np.amin(x), np.amax(x), 100)
        model = np.poly1d(np.polyfit(x, y, 1))
        plt.plot(xp, model(xp), '-', c=colour)

    fig, ax1 = plt.subplots()
    plt.title(label1 + ' by ' + label2a + ' and ' + label2b)

    clra = 'indigo'
    ax1.set_xlabel(label1)
    ax1.set_ylabel(label2a, color=clra)  # left-hand axis
    ax1.scatter(df[col1], df[col2a], color=clra, marker='^')
    add_trend(y1, clra)
    ax1.tick_params(axis='y', labelcolor=clra)

    ax2 = ax1.twinx()  # second axes sharing the same x-axis
    clrb = 'darkgreen'
    ax2.set_ylabel(label2b, color=clrb)  # right-hand axis
    ax2.scatter(df[col1], df[col2b], color=clrb)
    ax2.tick_params(axis='y', labelcolor=clrb)
    add_trend(y2, clrb)

    fig.tight_layout()  # otherwise the right y-label is slightly clipped
    plt.savefig('an2_colour' + n + '.png')
    plt.show()
# Cases where there is a possible correlation with colour intensity or hue.
# color intensity:
# check against : alc, flav, od, proline
# hue:
# check against: ma, tphen, flav, pac, od
def main():
    """Load wine.data and produce the two colour/hue comparison plots."""
    source = 'wine.data'
    records = parse_file(source, get_delim(source))
    columns = lines_to_dict(records)
    plot_data3(columns, 'ci', 'Colour Intensity',
               'alc', 'flav', 'Alcohol', 'Flavonoids', '1')
    plot_data3(columns, 'hue', 'Hue',
               'pac', 'od', 'Proanthocyanidins',
               'OD280/OD315 of Diluted Wines', '2')


if __name__ == "__main__":
    main()
|
flexible
|
{
"blob_id": "594479c22cada665dcdc76737085ce342d7d5faf",
"index": 1480,
"step-1": "<mask token>\n\n\ndef convert_type(data_value):\n try:\n return int(data_value)\n except ValueError:\n try:\n return float(data_value)\n except ValueError:\n return data_value\n\n\n<mask token>\n\n\ndef get_delim(sourcefile1):\n print('> executing get_delim function')\n data = open(sourcefile1, 'r')\n my_read_data = data.read()\n if my_read_data.find(',') > 0:\n print(' delimiter: comma')\n return ','\n else:\n print(' delimiter: space')\n return ' '\n print(' ')\n\n\ndef lines_to_dict(lines, header=False):\n print('> executing lines_to_dict')\n column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',\n 'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']\n data_dict = {}\n for idx, column in enumerate(column_titles):\n data_dict[column] = []\n for row in lines:\n data_dict[column] += [row[idx]]\n return data_dict\n\n\n<mask token>\n\n\ndef plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False\n ):\n df = pd.DataFrame.from_dict(dd)\n x = np.fromiter(dd[col1], dtype=float)\n y1 = np.fromiter(dd[col2a], dtype=float)\n y2 = np.fromiter(dd[col2b], dtype=float)\n fig, ax1 = plt.subplots()\n plt.title(label1 + ' by ' + label2a + ' and ' + label2b)\n clra = 'indigo'\n ax1.set_xlabel(label1)\n ax1.set_ylabel(label2a, color=clra)\n ax1.scatter(df[col1], df[col2a], color=clra, marker='^')\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y1, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clra)\n ax1.tick_params(axis='y', labelcolor=clra)\n ax2 = ax1.twinx()\n clrb = 'darkgreen'\n ax2.set_ylabel(label2b, color=clrb)\n ax2.scatter(df[col1], df[col2b], color=clrb)\n ax2.tick_params(axis='y', labelcolor=clrb)\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y2, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clrb)\n ax1.tick_params(axis='y', labelcolor=clra)\n fig.tight_layout()\n plt.savefig('an2_colour' + n + '.png')\n plt.show()\n\n\ndef 
main():\n data_file = 'wine.data'\n dlm = get_delim(data_file)\n my_data = parse_file(data_file, dlm)\n data_dictionary = lines_to_dict(my_data)\n plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',\n 'Alcohol', 'Flavonoids', '1')\n plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',\n 'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef convert_type(data_value):\n try:\n return int(data_value)\n except ValueError:\n try:\n return float(data_value)\n except ValueError:\n return data_value\n\n\n<mask token>\n\n\ndef get_delim(sourcefile1):\n print('> executing get_delim function')\n data = open(sourcefile1, 'r')\n my_read_data = data.read()\n if my_read_data.find(',') > 0:\n print(' delimiter: comma')\n return ','\n else:\n print(' delimiter: space')\n return ' '\n print(' ')\n\n\ndef lines_to_dict(lines, header=False):\n print('> executing lines_to_dict')\n column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',\n 'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']\n data_dict = {}\n for idx, column in enumerate(column_titles):\n data_dict[column] = []\n for row in lines:\n data_dict[column] += [row[idx]]\n return data_dict\n\n\ndef parse_file(data_file, dlm, debug=False):\n print('> executing parse_file')\n assert op.isfile(data_file)\n with open(data_file, 'r') as fhandle:\n csv_reader = csv.reader(fhandle, delimiter=dlm)\n lines = []\n if debug:\n count = 0\n for line in csv_reader:\n if debug:\n if count > 2:\n break\n count += 1\n newline = []\n for value in line:\n newline += [convert_type(value)]\n if len(newline) > 0:\n lines += [newline]\n print('> view a few lines')\n print(' ')\n for line in lines[0:2]:\n print(line)\n print(' ')\n return lines\n\n\ndef plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False\n ):\n df = pd.DataFrame.from_dict(dd)\n x = np.fromiter(dd[col1], dtype=float)\n y1 = np.fromiter(dd[col2a], dtype=float)\n y2 = np.fromiter(dd[col2b], dtype=float)\n fig, ax1 = plt.subplots()\n plt.title(label1 + ' by ' + label2a + ' and ' + label2b)\n clra = 'indigo'\n ax1.set_xlabel(label1)\n ax1.set_ylabel(label2a, color=clra)\n ax1.scatter(df[col1], df[col2a], color=clra, marker='^')\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y1, 1)\n model = np.poly1d(weights)\n plt.plot(xp, 
model(xp), '-', c=clra)\n ax1.tick_params(axis='y', labelcolor=clra)\n ax2 = ax1.twinx()\n clrb = 'darkgreen'\n ax2.set_ylabel(label2b, color=clrb)\n ax2.scatter(df[col1], df[col2b], color=clrb)\n ax2.tick_params(axis='y', labelcolor=clrb)\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y2, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clrb)\n ax1.tick_params(axis='y', labelcolor=clra)\n fig.tight_layout()\n plt.savefig('an2_colour' + n + '.png')\n plt.show()\n\n\ndef main():\n data_file = 'wine.data'\n dlm = get_delim(data_file)\n my_data = parse_file(data_file, dlm)\n data_dictionary = lines_to_dict(my_data)\n plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',\n 'Alcohol', 'Flavonoids', '1')\n plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',\n 'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')\n\n\n<mask token>\n",
"step-3": "print(\n '========================================================================================'\n )\nprint(\n '========================================================================================'\n )\nprint('> start of program an2_colour.py')\nprint('> import libraries')\n<mask token>\nprint('> define convert_type function')\n\n\ndef convert_type(data_value):\n try:\n return int(data_value)\n except ValueError:\n try:\n return float(data_value)\n except ValueError:\n return data_value\n\n\nprint('> define get_delim function')\n\n\ndef get_delim(sourcefile1):\n print('> executing get_delim function')\n data = open(sourcefile1, 'r')\n my_read_data = data.read()\n if my_read_data.find(',') > 0:\n print(' delimiter: comma')\n return ','\n else:\n print(' delimiter: space')\n return ' '\n print(' ')\n\n\ndef lines_to_dict(lines, header=False):\n print('> executing lines_to_dict')\n column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',\n 'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']\n data_dict = {}\n for idx, column in enumerate(column_titles):\n data_dict[column] = []\n for row in lines:\n data_dict[column] += [row[idx]]\n return data_dict\n\n\ndef parse_file(data_file, dlm, debug=False):\n print('> executing parse_file')\n assert op.isfile(data_file)\n with open(data_file, 'r') as fhandle:\n csv_reader = csv.reader(fhandle, delimiter=dlm)\n lines = []\n if debug:\n count = 0\n for line in csv_reader:\n if debug:\n if count > 2:\n break\n count += 1\n newline = []\n for value in line:\n newline += [convert_type(value)]\n if len(newline) > 0:\n lines += [newline]\n print('> view a few lines')\n print(' ')\n for line in lines[0:2]:\n print(line)\n print(' ')\n return lines\n\n\ndef plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False\n ):\n df = pd.DataFrame.from_dict(dd)\n x = np.fromiter(dd[col1], dtype=float)\n y1 = np.fromiter(dd[col2a], dtype=float)\n y2 = np.fromiter(dd[col2b], dtype=float)\n fig, 
ax1 = plt.subplots()\n plt.title(label1 + ' by ' + label2a + ' and ' + label2b)\n clra = 'indigo'\n ax1.set_xlabel(label1)\n ax1.set_ylabel(label2a, color=clra)\n ax1.scatter(df[col1], df[col2a], color=clra, marker='^')\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y1, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clra)\n ax1.tick_params(axis='y', labelcolor=clra)\n ax2 = ax1.twinx()\n clrb = 'darkgreen'\n ax2.set_ylabel(label2b, color=clrb)\n ax2.scatter(df[col1], df[col2b], color=clrb)\n ax2.tick_params(axis='y', labelcolor=clrb)\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y2, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clrb)\n ax1.tick_params(axis='y', labelcolor=clra)\n fig.tight_layout()\n plt.savefig('an2_colour' + n + '.png')\n plt.show()\n\n\ndef main():\n data_file = 'wine.data'\n dlm = get_delim(data_file)\n my_data = parse_file(data_file, dlm)\n data_dictionary = lines_to_dict(my_data)\n plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',\n 'Alcohol', 'Flavonoids', '1')\n plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',\n 'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "print(\n '========================================================================================'\n )\nprint(\n '========================================================================================'\n )\nprint('> start of program an2_colour.py')\nprint('> import libraries')\nimport argparse\nimport os.path as op\nimport csv\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\nfrom numpy.polynomial.polynomial import polyfit\nprint('> define convert_type function')\n\n\ndef convert_type(data_value):\n try:\n return int(data_value)\n except ValueError:\n try:\n return float(data_value)\n except ValueError:\n return data_value\n\n\nprint('> define get_delim function')\n\n\ndef get_delim(sourcefile1):\n print('> executing get_delim function')\n data = open(sourcefile1, 'r')\n my_read_data = data.read()\n if my_read_data.find(',') > 0:\n print(' delimiter: comma')\n return ','\n else:\n print(' delimiter: space')\n return ' '\n print(' ')\n\n\ndef lines_to_dict(lines, header=False):\n print('> executing lines_to_dict')\n column_titles = ['class', 'alc', 'ma', 'ash', 'alkash', 'mg', 'tphen',\n 'flav', 'nfphen', 'pac', 'ci', 'hue', 'od', 'proline']\n data_dict = {}\n for idx, column in enumerate(column_titles):\n data_dict[column] = []\n for row in lines:\n data_dict[column] += [row[idx]]\n return data_dict\n\n\ndef parse_file(data_file, dlm, debug=False):\n print('> executing parse_file')\n assert op.isfile(data_file)\n with open(data_file, 'r') as fhandle:\n csv_reader = csv.reader(fhandle, delimiter=dlm)\n lines = []\n if debug:\n count = 0\n for line in csv_reader:\n if debug:\n if count > 2:\n break\n count += 1\n newline = []\n for value in line:\n newline += [convert_type(value)]\n if len(newline) > 0:\n lines += [newline]\n print('> view a few lines')\n print(' ')\n for line in lines[0:2]:\n print(line)\n print(' ')\n return lines\n\n\ndef plot_data3(dd, col1, label1, col2a, col2b, label2a, label2b, n, debug=False\n ):\n df = 
pd.DataFrame.from_dict(dd)\n x = np.fromiter(dd[col1], dtype=float)\n y1 = np.fromiter(dd[col2a], dtype=float)\n y2 = np.fromiter(dd[col2b], dtype=float)\n fig, ax1 = plt.subplots()\n plt.title(label1 + ' by ' + label2a + ' and ' + label2b)\n clra = 'indigo'\n ax1.set_xlabel(label1)\n ax1.set_ylabel(label2a, color=clra)\n ax1.scatter(df[col1], df[col2a], color=clra, marker='^')\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y1, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clra)\n ax1.tick_params(axis='y', labelcolor=clra)\n ax2 = ax1.twinx()\n clrb = 'darkgreen'\n ax2.set_ylabel(label2b, color=clrb)\n ax2.scatter(df[col1], df[col2b], color=clrb)\n ax2.tick_params(axis='y', labelcolor=clrb)\n xp = np.linspace(np.amin(x), np.amax(x), 100)\n weights = np.polyfit(x, y2, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clrb)\n ax1.tick_params(axis='y', labelcolor=clra)\n fig.tight_layout()\n plt.savefig('an2_colour' + n + '.png')\n plt.show()\n\n\ndef main():\n data_file = 'wine.data'\n dlm = get_delim(data_file)\n my_data = parse_file(data_file, dlm)\n data_dictionary = lines_to_dict(my_data)\n plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav',\n 'Alcohol', 'Flavonoids', '1')\n plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od',\n 'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "# program name: an2_colour.py\n\n# no optional arguments: Uses Wine data to display information about the relationship of \n# various attributes with colour and hue \n\nprint('========================================================================================')\nprint('========================================================================================')\n\nprint('> start of program an2_colour.py')\nprint('> import libraries')\n\nimport argparse\nimport os.path as op\nimport csv\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport numpy as np\nfrom numpy.polynomial.polynomial import polyfit\n\nprint('> define convert_type function')\ndef convert_type(data_value):\n try:\n return int(data_value)\n except ValueError:\n try:\n return float(data_value)\n except ValueError:\n return data_value\n\nprint(\"> define get_delim function\")\ndef get_delim(sourcefile1):\n print('> executing get_delim function')\n data = open(sourcefile1, 'r') \n my_read_data = data.read()\n if my_read_data.find(',') > 0:\n print(' delimiter: comma')\n return ','\n else:\n print(' delimiter: space')\n return ' ' \n print(' ')\n\ndef lines_to_dict(lines, header=False):\n print('> executing lines_to_dict')\n # column_titles = ['Class','Alcohol','Malic acid','Ash','Alcalinity of ash','Magnesium','Total phenols','Flavanoids','Nonflavanoid phenols','Proanthocyanins','Color intensity','Hue',\n # 'OD280/OD315 of diluted wines','Proline']\n column_titles = ['class','alc','ma','ash','alkash','mg','tphen','flav','nfphen','pac','ci','hue',\n 'od','proline']\n \n data_dict = {}\n for idx, column in enumerate(column_titles):\n data_dict[column] = []\n for row in lines:\n data_dict[column] += [row[idx]]\n return data_dict\n\ndef parse_file(data_file, dlm, debug=False): # took delimiter out\n print('> executing parse_file')\n # Verify the file exists\n assert(op.isfile(data_file))\n\n # open it as a csv \n with open(data_file, 'r') as fhandle:\n csv_reader = csv.reader(fhandle, 
delimiter=dlm)\n # Add each line in the file to a list\n lines = []\n if debug:\n count = 0\n for line in csv_reader:\n if debug:\n if count > 2:\n break\n count += 1\n newline = []\n for value in line:\n newline += [convert_type(value)]\n if len(newline) > 0:\n lines += [newline]\n\n print('> view a few lines')\n print(' ')\n for line in lines[0:2]:\n print(line)\n print(' ')\n # Return all the contents of our file\n return lines\n\n\n# class','alc','ma','ash','alkash','mg','tphen','flav','nfphen','pac','ci','hue',\n# 'od','proline\n\n \ndef plot_data3(dd, col1, label1, \n col2a, col2b,\n label2a, label2b, n,\n debug=False):\n df = pd.DataFrame.from_dict(dd) \n x = np.fromiter(dd[col1], dtype=float) # need these for the lines below\n y1 = np.fromiter(dd[col2a], dtype=float)\n y2 = np.fromiter(dd[col2b], dtype=float)\n\n # print(df) \n fig, ax1 = plt.subplots()\n plt.title(label1 + ' by ' + label2a + ' and ' + label2b)\n\n clra = 'indigo'\n ax1.set_xlabel(label1)\n ax1.set_ylabel(label2a, color=clra) # left side\n\n ax1.scatter(df[col1], df[col2a], color=clra, marker = '^')\n\n xp = np.linspace(np.amin(x), np.amax(x), 100) #only works for numpy arrays\n weights = np.polyfit(x, y1, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clra)\n ax1.tick_params(axis='y', labelcolor=clra)\n\n ax2 = ax1.twinx() # instantiate a second axes that shares the same x-axis\n\n clrb = 'darkgreen'\n ax2.set_ylabel(label2b, color=clrb) # we already handled the x-label with ax1\n # ax2.plot(df[col1], df[col2b], color=color)\n ax2.scatter(df[col1], df[col2b], color= clrb)\n ax2.tick_params(axis='y', labelcolor=clrb)\n\n xp = np.linspace(np.amin(x), np.amax(x), 100) #only works for numpy arrays\n weights = np.polyfit(x, y2, 1)\n model = np.poly1d(weights)\n plt.plot(xp, model(xp), '-', c=clrb)\n ax1.tick_params(axis='y', labelcolor=clra)\n\n fig.tight_layout() # otherwise the right y-label is slightly clipped\n plt.savefig('an2_colour' + n + '.png')\n plt.show()\n\n# Cases 
where there is a possible correlation with colour intensity or hue. \n# color intensity:\n# check against : alc, flav, od, proline\n# hue:\n# check against: ma, tphen, flav, pac, od\n\ndef main():\n\n data_file = \"wine.data\"\n dlm = get_delim(data_file) \n my_data = parse_file(data_file, dlm)\n data_dictionary = lines_to_dict(my_data)\n #print(data_dictionary)\n\n plot_data3(data_dictionary, 'ci', 'Colour Intensity', 'alc', 'flav', 'Alcohol', 'Flavonoids', '1')\n plot_data3(data_dictionary, 'hue', 'Hue', 'pac', 'od', 'Proanthocyanidins', 'OD280/OD315 of Diluted Wines', '2')\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
import string
#takes file as input, outputs a dictionary of keys from the file
#file should be in format (apiName, key/id)
#dictionary key = apiName, value = key/id
def getKeys(f):
    """Read a credentials file and return a dict {apiName: key/id}.

    Each line of *f* is "name,key"; the header row "apiName,..." is
    dropped, as are blank or comma-less lines (the original raised
    IndexError on those).
    """
    keys = {}
    # "with" closes the handle (the original leaked it and also shadowed
    # the parameter f with the file object).
    with open(f, 'r') as source:
        for line in source:
            apiInfo = line.split(',')
            if len(apiInfo) < 2:
                continue  # skip blank / malformed lines
            keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)
    keys.pop('apiName', None)
    return keys
#print(getKeys('keys.txt'))
|
normal
|
{
"blob_id": "3653c6fce33467600a3eea72578ed995606bfc03",
"index": 4100,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef getKeys(f):\n keys = {}\n f = open(f, 'r')\n for line in f:\n apiInfo = line.split(',')\n keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)\n keys.pop('apiName', None)\n return keys\n",
"step-3": "import string\n\n\ndef getKeys(f):\n keys = {}\n f = open(f, 'r')\n for line in f:\n apiInfo = line.split(',')\n keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)\n keys.pop('apiName', None)\n return keys\n",
"step-4": "import string\n\n#takes file as input, outputs a dictionary of keys from the file\n#file should be in format (apiName, key/id)\n#dictionary key = apiName, value = key/id\ndef getKeys(f):\n keys = {}\n f = open(f, 'r')\n for line in f:\n apiInfo = line.split(',')\n keys[apiInfo[0]] = apiInfo[1].strip(string.whitespace)\n keys.pop('apiName', None)\n return keys\n\n#print(getKeys('keys.txt'))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Author: xurongzhong#126.com 技术支持qq群:6089740
# CreateDate: 2018-3-27
# pillow_rotate.py
import glob
import os
from PIL import Image
def rotate(files, dst, value=90):
    """Rotate every image in *files* by *value* degrees and save each one
    into the directory *dst* under its original base name.
    """
    for file_ in files:
        # Context manager closes the underlying file handle; the original
        # left every opened image dangling.
        with Image.open(file_) as img:
            rotated = img.rotate(value)
            name = "{}{}{}".format(dst, os.sep, os.path.basename(file_))
            rotated.save(name)
# Source and destination directories (hard-coded local paths).
src = r'/home/andrew/code/tmp_photos'
dst = r'/home/andrew/code/tmp_photos2'
# Collect every file with an extension in src and rotate each into dst.
common = glob.glob('{}{}*.*'.format(src, os.sep))
rotate(common, dst)
|
normal
|
{
"blob_id": "cd104eec21be8a59e8fb3bd8ab061dd357fc126a",
"index": 667,
"step-1": "<mask token>\n\n\ndef rotate(files, dst, value=90):\n for file_ in files:\n img = Image.open(file_)\n img = img.rotate(value)\n name = '{}{}{}'.format(dst, os.sep, os.path.basename(file_))\n img.save(name)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef rotate(files, dst, value=90):\n for file_ in files:\n img = Image.open(file_)\n img = img.rotate(value)\n name = '{}{}{}'.format(dst, os.sep, os.path.basename(file_))\n img.save(name)\n\n\n<mask token>\nrotate(common, dst)\n",
"step-3": "<mask token>\n\n\ndef rotate(files, dst, value=90):\n for file_ in files:\n img = Image.open(file_)\n img = img.rotate(value)\n name = '{}{}{}'.format(dst, os.sep, os.path.basename(file_))\n img.save(name)\n\n\nsrc = '/home/andrew/code/tmp_photos'\ndst = '/home/andrew/code/tmp_photos2'\ncommon = glob.glob('{}{}*.*'.format(src, os.sep))\nrotate(common, dst)\n",
"step-4": "import glob\nimport os\nfrom PIL import Image\n\n\ndef rotate(files, dst, value=90):\n for file_ in files:\n img = Image.open(file_)\n img = img.rotate(value)\n name = '{}{}{}'.format(dst, os.sep, os.path.basename(file_))\n img.save(name)\n\n\nsrc = '/home/andrew/code/tmp_photos'\ndst = '/home/andrew/code/tmp_photos2'\ncommon = glob.glob('{}{}*.*'.format(src, os.sep))\nrotate(common, dst)\n",
"step-5": "#!/usr/bin/python3\n# -*- coding: utf-8 -*-\n# Author: xurongzhong#126.com 技术支持qq群:6089740\n# CreateDate: 2018-3-27\n# pillow_rotate.py\nimport glob\nimport os \nfrom PIL import Image\n\ndef rotate(files, dst, value=90):\n for file_ in files:\n img = Image.open(file_)\n img = img.rotate(value)\n name = \"{}{}{}\".format(dst, os.sep, os.path.basename(file_))\n img.save(name)\n\nsrc = r'/home/andrew/code/tmp_photos'\ndst = r'/home/andrew/code/tmp_photos2'\n\ncommon = glob.glob('{}{}*.*'.format(src, os.sep)) \nrotate(common, dst)\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class QuoteModel(db.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, author, quote, rating=1):
self.author = author
self.quote = quote
self.rate = rating
def to_dict(self):
d = {}
for column in self.__table__.columns:
d[column.name] = str(getattr(self, column.name))
return d
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class QuoteModel(db.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, author, quote, rating=1):
self.author = author
self.quote = quote
self.rate = rating
def to_dict(self):
d = {}
for column in self.__table__.columns:
d[column.name] = str(getattr(self, column.name))
return d
<|reserved_special_token_0|>
def __repr__(self):
return self.__str__()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class QuoteModel(db.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, author, quote, rating=1):
self.author = author
self.quote = quote
self.rate = rating
def to_dict(self):
d = {}
for column in self.__table__.columns:
d[column.name] = str(getattr(self, column.name))
return d
def __str__(self):
return f'Quote. Author: {self.author}, q: {self.quote[:10]}...'
def __repr__(self):
return self.__str__()
<|reserved_special_token_1|>
from api import db
from api.models.author import AuthorModel
class QuoteModel(db.Model):
    """SQLAlchemy model for a quote linked to an AuthorModel row."""

    id = db.Column(db.Integer, primary_key=True)
    # FK to the owning author's primary key.
    author_id = db.Column(db.Integer, db.ForeignKey(AuthorModel.id))
    quote = db.Column(db.String(255), unique=False)
    # Integer rating; defaults to 1 in __init__ (no DB-side default).
    rate = db.Column(db.Integer)

    def __init__(self, author, quote, rating=1):
        # NOTE(review): assigns self.author although the mapped column is
        # author_id — presumably "author" is a relationship defined via a
        # backref on AuthorModel; confirm against the author model.
        self.author = author
        self.quote = quote
        self.rate = rating

    def to_dict(self):
        """Return {column name: str(value)} for every mapped column."""
        d = {}
        for column in self.__table__.columns:
            d[column.name] = str(getattr(self, column.name))
        return d

    def __str__(self):
        # Short human-readable form: author plus first 10 chars of the quote.
        return f'Quote. Author: {self.author}, q: {self.quote[:10]}...'

    def __repr__(self):
        return self.__str__()
<|reserved_special_token_1|>
from api import db
from api.models.author import AuthorModel
class QuoteModel(db.Model):
    """SQLAlchemy model for a quote linked to an AuthorModel row."""

    id = db.Column(db.Integer, primary_key=True)
    # FK to the owning author's primary key.
    author_id = db.Column(db.Integer, db.ForeignKey(AuthorModel.id))
    quote = db.Column(db.String(255), unique=False)
    # Integer rating; defaults to 1 in __init__ (no DB-side default).
    rate = db.Column(db.Integer)

    def __init__(self, author, quote, rating=1):
        # NOTE(review): assigns self.author although the mapped column is
        # author_id — presumably "author" is a relationship defined via a
        # backref on AuthorModel; confirm against the author model.
        self.author = author
        self.quote = quote
        self.rate = rating

    def to_dict(self):
        """Return {column name: str(value)} for every mapped column."""
        d = {}
        for column in self.__table__.columns:
            d[column.name] = str(getattr(self, column.name))
        return d

    def __str__(self):
        # Short human-readable form: author plus first 10 chars of the quote.
        return f"Quote. Author: {self.author}, q: {self.quote[:10]}..."

    def __repr__(self):
        return self.__str__()
|
flexible
|
{
"blob_id": "38f41fa87230ddc0b3a8c411b4c569f59f0ea065",
"index": 2509,
"step-1": "<mask token>\n\n\nclass QuoteModel(db.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, author, quote, rating=1):\n self.author = author\n self.quote = quote\n self.rate = rating\n\n def to_dict(self):\n d = {}\n for column in self.__table__.columns:\n d[column.name] = str(getattr(self, column.name))\n return d\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass QuoteModel(db.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, author, quote, rating=1):\n self.author = author\n self.quote = quote\n self.rate = rating\n\n def to_dict(self):\n d = {}\n for column in self.__table__.columns:\n d[column.name] = str(getattr(self, column.name))\n return d\n <mask token>\n\n def __repr__(self):\n return self.__str__()\n",
"step-3": "<mask token>\n\n\nclass QuoteModel(db.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, author, quote, rating=1):\n self.author = author\n self.quote = quote\n self.rate = rating\n\n def to_dict(self):\n d = {}\n for column in self.__table__.columns:\n d[column.name] = str(getattr(self, column.name))\n return d\n\n def __str__(self):\n return f'Quote. Author: {self.author}, q: {self.quote[:10]}...'\n\n def __repr__(self):\n return self.__str__()\n",
"step-4": "from api import db\nfrom api.models.author import AuthorModel\n\n\nclass QuoteModel(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n author_id = db.Column(db.Integer, db.ForeignKey(AuthorModel.id))\n quote = db.Column(db.String(255), unique=False)\n rate = db.Column(db.Integer)\n\n def __init__(self, author, quote, rating=1):\n self.author = author\n self.quote = quote\n self.rate = rating\n\n def to_dict(self):\n d = {}\n for column in self.__table__.columns:\n d[column.name] = str(getattr(self, column.name))\n return d\n\n def __str__(self):\n return f'Quote. Author: {self.author}, q: {self.quote[:10]}...'\n\n def __repr__(self):\n return self.__str__()\n",
"step-5": "from api import db\nfrom api.models.author import AuthorModel\n\n\nclass QuoteModel(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n author_id = db.Column(db.Integer, db.ForeignKey(AuthorModel.id))\n quote = db.Column(db.String(255), unique=False)\n rate = db.Column(db.Integer)\n\n def __init__(self, author, quote, rating=1):\n self.author = author\n self.quote = quote\n self.rate = rating\n\n def to_dict(self):\n d = {}\n for column in self.__table__.columns:\n d[column.name] = str(getattr(self, column.name))\n return d\n\n def __str__(self):\n return f\"Quote. Author: {self.author}, q: {self.quote[:10]}...\"\n\n def __repr__(self):\n return self.__str__()\n",
"step-ids": [
3,
4,
5,
7,
8
]
}
|
[
3,
4,
5,
7,
8
] |
<|reserved_special_token_0|>
class Formation(db):
<|reserved_special_token_0|>
query: Query
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@staticmethod
def create(filiere: str, lieu: str, niveau: str):
return Formation(filiere=filiere, lieu=lieu, niveau=niveau)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Formation(db):
<|reserved_special_token_0|>
query: Query
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@staticmethod
def create(filiere: str, lieu: str, niveau: str):
return Formation(filiere=filiere, lieu=lieu, niveau=niveau)
def to_json(self):
return {'id': self.id_form, 'branch': self.filiere, 'location':
self.lieu, 'level': self.niveau}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Formation(db):
__tablename__ = 'formation'
query: Query
id_form = Column(Integer, primary_key=True)
filiere = Column(String, nullable=False)
lieu = Column(String, nullable=False)
niveau = Column(String, nullable=False)
@staticmethod
def create(filiere: str, lieu: str, niveau: str):
return Formation(filiere=filiere, lieu=lieu, niveau=niveau)
def to_json(self):
return {'id': self.id_form, 'branch': self.filiere, 'location':
self.lieu, 'level': self.niveau}
<|reserved_special_token_1|>
from sqlalchemy import Integer, String, Column
from sqlalchemy.orm import Query
from server import db
class Formation(db):
    """ORM model for a training programme (filière / lieu / niveau)."""

    __tablename__ = 'formation'
    # NOTE(review): class-level annotation for the query attribute that the
    # declarative base presumably injects — confirm in server.db.
    query: Query

    id_form = Column(Integer, primary_key=True)
    filiere = Column(String, nullable=False)  # branch / field of study
    lieu = Column(String, nullable=False)  # location
    niveau = Column(String, nullable=False)  # level

    @staticmethod
    def create(filiere: str, lieu: str, niveau: str):
        """Build an unsaved Formation instance from its three attributes."""
        return Formation(filiere=filiere, lieu=lieu, niveau=niveau)

    def to_json(self):
        """Serialize to the API's JSON shape (English key names)."""
        return {'id': self.id_form, 'branch': self.filiere, 'location':
            self.lieu, 'level': self.niveau}
<|reserved_special_token_1|>
from sqlalchemy import Integer, String, Column
from sqlalchemy.orm import Query
from server import db
class Formation(db):
    """ORM model for a training programme (filière / lieu / niveau)."""

    __tablename__ = "formation"
    # NOTE(review): class-level annotation for the query attribute that the
    # declarative base presumably injects — confirm in server.db.
    query: Query

    id_form = Column(Integer, primary_key=True)
    filiere = Column(String, nullable=False)  # branch / field of study
    lieu = Column(String, nullable=False)  # location
    niveau = Column(String, nullable=False)  # level

    @staticmethod
    def create(filiere: str, lieu: str, niveau: str):
        """Build an unsaved Formation instance from its three attributes."""
        return Formation(filiere=filiere, lieu=lieu, niveau=niveau)

    def to_json(self):
        """Serialize to the API's JSON shape (English key names)."""
        return {
            'id': self.id_form,
            'branch': self.filiere,
            'location': self.lieu,
            'level': self.niveau,
        }
|
flexible
|
{
"blob_id": "fff70312fa7c3259cf4c3d9e7ebd8ca5b9a56887",
"index": 2714,
"step-1": "<mask token>\n\n\nclass Formation(db):\n <mask token>\n query: Query\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def create(filiere: str, lieu: str, niveau: str):\n return Formation(filiere=filiere, lieu=lieu, niveau=niveau)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Formation(db):\n <mask token>\n query: Query\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @staticmethod\n def create(filiere: str, lieu: str, niveau: str):\n return Formation(filiere=filiere, lieu=lieu, niveau=niveau)\n\n def to_json(self):\n return {'id': self.id_form, 'branch': self.filiere, 'location':\n self.lieu, 'level': self.niveau}\n",
"step-3": "<mask token>\n\n\nclass Formation(db):\n __tablename__ = 'formation'\n query: Query\n id_form = Column(Integer, primary_key=True)\n filiere = Column(String, nullable=False)\n lieu = Column(String, nullable=False)\n niveau = Column(String, nullable=False)\n\n @staticmethod\n def create(filiere: str, lieu: str, niveau: str):\n return Formation(filiere=filiere, lieu=lieu, niveau=niveau)\n\n def to_json(self):\n return {'id': self.id_form, 'branch': self.filiere, 'location':\n self.lieu, 'level': self.niveau}\n",
"step-4": "from sqlalchemy import Integer, String, Column\nfrom sqlalchemy.orm import Query\nfrom server import db\n\n\nclass Formation(db):\n __tablename__ = 'formation'\n query: Query\n id_form = Column(Integer, primary_key=True)\n filiere = Column(String, nullable=False)\n lieu = Column(String, nullable=False)\n niveau = Column(String, nullable=False)\n\n @staticmethod\n def create(filiere: str, lieu: str, niveau: str):\n return Formation(filiere=filiere, lieu=lieu, niveau=niveau)\n\n def to_json(self):\n return {'id': self.id_form, 'branch': self.filiere, 'location':\n self.lieu, 'level': self.niveau}\n",
"step-5": "from sqlalchemy import Integer, String, Column\nfrom sqlalchemy.orm import Query\nfrom server import db\n\nclass Formation(db):\n __tablename__ = \"formation\"\n query: Query\n\n id_form = Column(Integer, primary_key=True)\n filiere = Column(String, nullable=False)\n lieu = Column(String, nullable=False)\n niveau = Column(String, nullable=False)\n\n @staticmethod\n def create(filiere: str, lieu: str, niveau: str):\n return Formation(filiere=filiere, lieu=lieu, niveau=niveau)\n\n def to_json(self):\n return {\n 'id': self.id_form,\n 'branch': self.filiere,\n 'location': self.lieu,\n 'level': self.niveau,\n }\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import requests, shutil, os, glob
from zipfile import ZipFile
import pandas as pd
from xlrd import open_workbook
import csv
# zipfilename = 'desiya_hotels'
# try:
# # downloading zip file
# r = requests.get('http://staticstore.travelguru.com/testdump/1300001176/Excel.zip', auth=('testdump', 'testdump'), verify=False,stream=True) #Note web_link is https://
# r.raw.decode_content = True
# with open(os.path.join(os.path.dirname(__file__), 'storage/{}.zip'.format(zipfilename)), 'wb') as f:
# shutil.copyfileobj(r.raw, f)
#
# #extracting zip file as xls file
# with ZipFile(glob.glob(os.path.join(os.path.dirname(__file__), 'storage/desiya*.zip'))[0], 'r') as zip:
# zip.extractall(os.path.join(os.path.dirname(__file__), 'storage/'))
# #Rename xls file name as "desiya_hotels"
# if glob.glob(os.path.join(os.path.dirname(__file__), 'storage/*[0-9].xls')):
# for filename in glob.glob(os.path.join(os.path.dirname(__file__), 'storage/*[a-zA-z].xls')):
# os.remove(filename)
# os.rename(glob.glob(os.path.join(os.path.dirname(__file__), 'storage/*[0-9].xls'))[0], os.path.join(os.path.dirname(__file__),'storage/{}.xls'.format(zipfilename)))
# else:
# print('unzipped xls file is not found in storare folder')
# except Exception as e:
# print("Error while downloading zip file")
#read xls file
# xls = pd.ExcelFile(glob.glob(os.path.join(os.path.dirname(__file__), 'storage/desiya*.xls'))[0])
# df = pd.read_excel(xls, sheet_name=0, index_col=None)
# print(df['Name'])
# print(df.head(5))
# for index, row in df.iterrows():
# print(index, row[3])
#convert xls to csvc
# df.to_csv(os.path.join(os.path.dirname(__file__),'storage/{}'.format('robot.csv')), encoding='utf-8', index=False)
#convert xls file to csv using xlrd module
# Convert the downloaded robot workbook (xls) to storage/robot_list.csv.
storage_dir = os.path.join(os.path.dirname(__file__), 'storage')
xlsfile = glob.glob(os.path.join(storage_dir, 'robot*.xls'))[0]
wb = open_workbook(xlsfile)
sheet = wb.sheet_by_name('robot_list')
# newline='' is required when handing a file object to csv.writer; without
# it the writer's '\r\n' terminator is translated again and every record is
# followed by a blank line on Windows.
with open(os.path.join(storage_dir, 'robot_list.csv'), "w", newline='') as csv_file:
    writer = csv.writer(csv_file, delimiter=",")
    # The first sheet row holds the column headers.
    writer.writerow([cell.value for cell in sheet.row(0)])
    for i in range(1, sheet.nrows):
        # Falsy cells (empty string, but also 0 / 0.0) become empty fields.
        # NOTE(review): confirm that zero values should really be blanked out.
        row = [str(cell.value).strip() if cell.value else None
               for cell in sheet.row(i)]
        writer.writerow(row)
|
normal
|
{
"blob_id": "1ef9df43725196904ec6c0c881f4a1204174b176",
"index": 375,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open(os.path.join(os.path.dirname(__file__), 'storage/robot_list.csv'),\n 'w') as file:\n writer = csv.writer(file, delimiter=',')\n headers = [cell.value for cell in sheet.row(0)]\n writer.writerow(headers)\n for i in range(1, sheet.nrows):\n rowvalue_list = [(str(cell.value).strip() if cell.value else None) for\n cell in sheet.row(i)]\n writer.writerow(rowvalue_list)\n",
"step-3": "<mask token>\nxlsfile = glob.glob(os.path.join(os.path.dirname(__file__),\n 'storage/robot*.xls'))[0]\nwb = open_workbook(xlsfile)\nsheet = wb.sheet_by_name('robot_list')\nwith open(os.path.join(os.path.dirname(__file__), 'storage/robot_list.csv'),\n 'w') as file:\n writer = csv.writer(file, delimiter=',')\n headers = [cell.value for cell in sheet.row(0)]\n writer.writerow(headers)\n for i in range(1, sheet.nrows):\n rowvalue_list = [(str(cell.value).strip() if cell.value else None) for\n cell in sheet.row(i)]\n writer.writerow(rowvalue_list)\n",
"step-4": "import requests, shutil, os, glob\nfrom zipfile import ZipFile\nimport pandas as pd\nfrom xlrd import open_workbook\nimport csv\nxlsfile = glob.glob(os.path.join(os.path.dirname(__file__),\n 'storage/robot*.xls'))[0]\nwb = open_workbook(xlsfile)\nsheet = wb.sheet_by_name('robot_list')\nwith open(os.path.join(os.path.dirname(__file__), 'storage/robot_list.csv'),\n 'w') as file:\n writer = csv.writer(file, delimiter=',')\n headers = [cell.value for cell in sheet.row(0)]\n writer.writerow(headers)\n for i in range(1, sheet.nrows):\n rowvalue_list = [(str(cell.value).strip() if cell.value else None) for\n cell in sheet.row(i)]\n writer.writerow(rowvalue_list)\n",
"step-5": "\n\nimport requests, shutil, os, glob\nfrom zipfile import ZipFile\nimport pandas as pd\nfrom xlrd import open_workbook\nimport csv\n\n# zipfilename = 'desiya_hotels'\n# try:\n# # downloading zip file\n# r = requests.get('http://staticstore.travelguru.com/testdump/1300001176/Excel.zip', auth=('testdump', 'testdump'), verify=False,stream=True) #Note web_link is https://\n# r.raw.decode_content = True\n# with open(os.path.join(os.path.dirname(__file__), 'storage/{}.zip'.format(zipfilename)), 'wb') as f:\n# shutil.copyfileobj(r.raw, f)\n#\n# #extracting zip file as xls file\n# with ZipFile(glob.glob(os.path.join(os.path.dirname(__file__), 'storage/desiya*.zip'))[0], 'r') as zip:\n# zip.extractall(os.path.join(os.path.dirname(__file__), 'storage/'))\n# #Rename xls file name as \"desiya_hotels\"\n# if glob.glob(os.path.join(os.path.dirname(__file__), 'storage/*[0-9].xls')):\n# for filename in glob.glob(os.path.join(os.path.dirname(__file__), 'storage/*[a-zA-z].xls')):\n# os.remove(filename)\n# os.rename(glob.glob(os.path.join(os.path.dirname(__file__), 'storage/*[0-9].xls'))[0], os.path.join(os.path.dirname(__file__),'storage/{}.xls'.format(zipfilename)))\n# else:\n# print('unzipped xls file is not found in storare folder')\n# except Exception as e:\n# print(\"Error while downloading zip file\")\n\n#read xls file\n# xls = pd.ExcelFile(glob.glob(os.path.join(os.path.dirname(__file__), 'storage/desiya*.xls'))[0])\n# df = pd.read_excel(xls, sheet_name=0, index_col=None)\n# print(df['Name'])\n# print(df.head(5))\n# for index, row in df.iterrows():\n# print(index, row[3])\n\n#convert xls to csvc\n# df.to_csv(os.path.join(os.path.dirname(__file__),'storage/{}'.format('robot.csv')), encoding='utf-8', index=False)\n\n\n#convert xls file to csv using xlrd module\nxlsfile = glob.glob(os.path.join(os.path.dirname(__file__), 'storage/robot*.xls'))[0]\nwb = open_workbook(xlsfile)\nsheet = wb.sheet_by_name('robot_list')\nwith open(os.path.join(os.path.dirname(__file__), 
'storage/robot_list.csv'), \"w\") as file:\n writer = csv.writer(file, delimiter=\",\")\n headers = [cell.value for cell in sheet.row(0)]\n writer.writerow(headers)\n for i in range(1, sheet.nrows):\n rowvalue_list = [str(cell.value).strip() if cell.value else None for cell in sheet.row(i)]\n writer.writerow(rowvalue_list)\n\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Generated by Django 3.2.2 on 2021-05-07 08:01
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the `teams` table."""
    initial = True
    # First migration of the app, so nothing to depend on.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='teams',
            fields=[
                # Explicit surrogate primary key.
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=50)),
                ('discipline', models.CharField(max_length=50)),
                ('amount', models.IntegerField()),
            ],
            options={
                'ordering': ['id'],
                # A team is unique per (name, discipline, amount) triple.
                'unique_together': {('name', 'discipline', 'amount')},
            },
        ),
    ]
|
normal
|
{
"blob_id": "e72962b644fab148741eb1c528d48ada45a43e51",
"index": 3978,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='teams', fields=[('id',\n models.AutoField(primary_key=True, serialize=False)), ('name',\n models.CharField(max_length=50)), ('discipline', models.CharField(\n max_length=50)), ('amount', models.IntegerField())], options={\n 'ordering': ['id'], 'unique_together': {('name', 'discipline',\n 'amount')}})]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='teams', fields=[('id',\n models.AutoField(primary_key=True, serialize=False)), ('name',\n models.CharField(max_length=50)), ('discipline', models.CharField(\n max_length=50)), ('amount', models.IntegerField())], options={\n 'ordering': ['id'], 'unique_together': {('name', 'discipline',\n 'amount')}})]\n",
"step-5": "# Generated by Django 3.2.2 on 2021-05-07 08:01\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='teams',\n fields=[\n ('id', models.AutoField(primary_key=True, serialize=False)),\n ('name', models.CharField(max_length=50)),\n ('discipline', models.CharField(max_length=50)),\n ('amount', models.IntegerField()),\n ],\n options={\n 'ordering': ['id'],\n 'unique_together': {('name', 'discipline', 'amount')},\n },\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/python
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds all assets under src/rawassets/, writing the results to assets/.
Finds the flatbuffer compiler and cwebp tool and then uses them to convert the
JSON files to flatbuffer binary files and the png files to webp files so that
they can be loaded by the game. This script also includes various 'make' style
rules. If you just want to build the flatbuffer binaries you can pass
'flatbuffer' as an argument, or if you want to just build the webp files you can
pass 'cwebp' as an argument. Additionally, if you would like to clean all
generated files, you can call this script with the argument 'clean'.
"""
import distutils.spawn
import glob
import os
import platform
import subprocess
import sys
# The project root directory, which is one level up from this script's
# directory.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir))
PREBUILTS_ROOT = os.path.abspath(os.path.join(os.path.join(PROJECT_ROOT),
os.path.pardir, os.path.pardir,
os.path.pardir, os.path.pardir,
'prebuilts'))
# Directories that may contains the FlatBuffers compiler.
FLATBUFFERS_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
]
# Directory that contains the cwebp tool.
CWEBP_BINARY_IN_PATH = distutils.spawn.find_executable('cwebp')
CWEBP_PATHS = [
os.path.join(PROJECT_ROOT, 'bin'),
os.path.join(PROJECT_ROOT, 'bin', 'Release'),
os.path.join(PROJECT_ROOT, 'bin', 'Debug'),
os.path.join(PREBUILTS_ROOT, 'libwebp',
'%s-x86' % platform.system().lower(),
'libwebp-0.4.1-%s-x86-32' % platform.system().lower(), 'bin'),
os.path.dirname(CWEBP_BINARY_IN_PATH) if CWEBP_BINARY_IN_PATH else '',
]
# Directory to place processed assets.
ASSETS_PATH = os.path.join(PROJECT_ROOT, 'assets')
# Directory where unprocessed assets can be found.
RAW_ASSETS_PATH = os.path.join(PROJECT_ROOT, 'src', 'rawassets')
# Directory where processed sound flatbuffer data can be found.
SOUND_PATH = os.path.join(ASSETS_PATH, 'sounds')
# Directory where unprocessed sound flatbuffer data can be found.
RAW_SOUND_PATH = os.path.join(RAW_ASSETS_PATH, 'sounds')
# Directory where processed material flatbuffer data can be found.
MATERIAL_PATH = os.path.join(ASSETS_PATH, 'materials')
# Directory where unprocessed material flatbuffer data can be found.
RAW_MATERIAL_PATH = os.path.join(RAW_ASSETS_PATH, 'materials')
# Directory where processed textures can be found.
TEXTURE_PATH = os.path.join(ASSETS_PATH, 'textures')
# Directory where unprocessed textures can be found.
RAW_TEXTURE_PATH = os.path.join(RAW_ASSETS_PATH, 'textures')
# Directory where unprocessed assets can be found.
SCHEMA_PATH = os.path.join(PROJECT_ROOT, 'src', 'flatbufferschemas')
# Windows uses the .exe extension on executables.
EXECUTABLE_EXTENSION = '.exe' if platform.system() == 'Windows' else ''
# Name of the flatbuffer executable.
FLATC_EXECUTABLE_NAME = 'flatc' + EXECUTABLE_EXTENSION
# Name of the cwebp executable.
CWEBP_EXECUTABLE_NAME = 'cwebp' + EXECUTABLE_EXTENSION
# What level of quality we want to apply to the webp files.
# Ranges from 0 to 100.
WEBP_QUALITY = 90
def processed_json_dir(path):
  """Map a raw json asset path to the directory its output will live in."""
  target = path.replace(RAW_ASSETS_PATH, ASSETS_PATH)
  return os.path.dirname(target)
class FlatbuffersConversionData(object):
  """Description of one json-to-flatbuffer conversion batch.

  Attributes:
    schema: Path to the flatbuffer schema that governs the conversion.
    input_files: The json files to feed to the compiler.
    output_path: Directory that receives the generated binaries.
  """

  def __init__(self, schema, input_files, output_path):
    """Record the schema, input files and output directory for this batch."""
    self.output_path = output_path
    self.input_files = input_files
    self.schema = schema
# A list of json files and their schemas that will be converted to binary files
# by the flatbuffer compiler.
FLATBUFFERS_CONVERSION_DATA = [
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'config.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'config.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'buses.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'buses.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_assets.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH, 'sound_assets.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'character_state_machine_def.fbs'),
input_files=[os.path.join(RAW_ASSETS_PATH,
'character_state_machine_def.json')],
output_path=ASSETS_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'sound_collection_def.fbs'),
input_files=glob.glob(os.path.join(RAW_SOUND_PATH, '*.json')),
output_path=SOUND_PATH),
FlatbuffersConversionData(
schema=os.path.join(SCHEMA_PATH, 'materials.fbs'),
input_files=glob.glob(os.path.join(RAW_MATERIAL_PATH, '*.json')),
output_path=MATERIAL_PATH)
]
def processed_texture_path(path):
  """Take the path to a raw png asset and convert it to target webp path.

  Only the file extension is rewritten.  The previous blanket
  str.replace('png', 'webp') would also corrupt a literal 'png' occurring
  elsewhere in the path (e.g. a directory or stem containing 'png'), and was
  inconsistent with processed_json_path's dotted-extension replacement.
  """
  target = path.replace(RAW_ASSETS_PATH, ASSETS_PATH)
  root, _ = os.path.splitext(target)
  return root + '.webp'
# PNG files to convert to webp.
PNG_TEXTURES = {
'input_files': glob.glob(os.path.join(RAW_TEXTURE_PATH, '*.png')),
'output_files': [processed_texture_path(png_path)
for png_path in glob.glob(os.path.join(RAW_TEXTURE_PATH,
'*.png'))]
}
def find_executable(name, paths):
  """Return the first path in *paths* containing a file called *name*.

  Falls back to returning *name* itself (assumed resolvable via the system
  PATH) when no candidate directory contains it.
  """
  for directory in paths:
    candidate = os.path.join(directory, name)
    if os.path.isfile(candidate):
      return candidate
  return name
# Location of FlatBuffers compiler.
FLATC = find_executable(FLATC_EXECUTABLE_NAME, FLATBUFFERS_PATHS)
# Location of webp compression tool.
CWEBP = find_executable(CWEBP_EXECUTABLE_NAME, CWEBP_PATHS)
class BuildError(Exception):
  """Error indicating there was a problem building assets.

  Attributes:
    argv: The command line that failed.
    error_code: The child process exit status.
  """

  def __init__(self, argv, error_code):
    """Store the failing command line and its exit status."""
    Exception.__init__(self)
    self.error_code = error_code
    self.argv = argv
def run_subprocess(argv):
  """Run *argv* as a child process, raising BuildError on a nonzero exit."""
  child = subprocess.Popen(argv)
  status = child.wait()
  if status:
    raise BuildError(argv, status)
def convert_json_to_flatbuffer_binary(json, schema, out_dir):
  """Invoke the flatbuffer compiler for one json file.

  Args:
    json: Path of the json file to compile into a flatbuffer binary.
    schema: Path of the flatbuffer schema governing the conversion.
    out_dir: Directory that receives the generated binary.

  Raises:
    BuildError: Process return code was nonzero.
  """
  run_subprocess([FLATC, '-o', out_dir, '-b', schema, json])
def convert_png_image_to_webp(png, out, quality=80):
  """Compress one png file into a webp file with the cwebp tool.

  Args:
    png: Path of the source png file.
    out: Path of the webp file to write.
    quality: Output quality from 0 (poor) to 100 (very good); ~80 is typical.

  Raises:
    BuildError: Process return code was nonzero.
  """
  run_subprocess([CWEBP, '-q', str(quality), png, '-o', out])
def needs_rebuild(source, target):
  """Report whether *target* must be regenerated from *source*.

  Args:
    source: The source file to be compared.
    target: The target file which we may need to rebuild.

  Returns:
    True when the target file is absent or older than the source.
  """
  if not os.path.isfile(target):
    return True
  return os.path.getmtime(source) > os.path.getmtime(target)
def processed_json_path(path):
  """Map a raw json asset path to its generated flatbuffer binary path."""
  target = path.replace(RAW_ASSETS_PATH, ASSETS_PATH)
  return target.replace('.json', '.bin')
def generate_flatbuffer_binaries():
  """Compile every out-of-date json file in the registered conversions."""
  for conversion in FLATBUFFERS_CONVERSION_DATA:
    out_dir = conversion.output_path
    if not os.path.exists(out_dir):
      os.makedirs(out_dir)
    for json in conversion.input_files:
      target = processed_json_path(json)
      # Rebuild when either the json or its schema is newer than the output.
      if needs_rebuild(json, target) or needs_rebuild(conversion.schema, target):
        convert_json_to_flatbuffer_binary(json, conversion.schema, out_dir)
def generate_webp_textures():
  """Convert every raw png texture that is out of date into webp."""
  if not os.path.exists(TEXTURE_PATH):
    os.makedirs(TEXTURE_PATH)
  pairs = zip(PNG_TEXTURES['input_files'], PNG_TEXTURES['output_files'])
  for png, webp in pairs:
    if needs_rebuild(png, webp):
      convert_png_image_to_webp(png, webp, WEBP_QUALITY)
def clean_webp_textures():
  """Remove every generated webp texture that currently exists."""
  existing = (p for p in PNG_TEXTURES['output_files'] if os.path.isfile(p))
  for webp in existing:
    os.remove(webp)
def clean_flatbuffer_binaries():
  """Remove every generated flatbuffer binary that currently exists."""
  for conversion in FLATBUFFERS_CONVERSION_DATA:
    for json in conversion.input_files:
      binary = processed_json_path(json)
      if os.path.isfile(binary):
        os.remove(binary)
def clean():
  """Delete all the processed files (flatbuffer binaries and webp textures)."""
  # Delegates to the per-asset-type cleaners.
  clean_flatbuffer_binaries()
  clean_webp_textures()
def handle_build_error(error):
  """Print a human-readable description of a BuildError to stderr."""
  command_line = ' '.join(error.argv)
  sys.stderr.write('Error running command `%s`. Returned %s.\n' % (
      command_line, str(error.error_code)))
def main(argv):
  """Builds or cleans the assets needed for the game.
  To build all assets, either call this script without any arguments. Or
  alternatively, call it with the argument 'all'. To just convert the flatbuffer
  json files, call it with 'flatbuffers'. Likewise to convert the png files to
  webp files, call it with 'webp'. To clean all converted files, call it with
  'clean'.
  Args:
    argv: The command line argument containing which command to run.
  Returns:
    Returns 0 on success.
  """
  # Default to building everything when no target argument is given.
  target = argv[1] if len(argv) >= 2 else 'all'
  if target not in ('all', 'flatbuffers', 'webp', 'clean'):
    sys.stderr.write('No rule to build target %s.\n' % target)
    # NOTE(review): an unknown target only warns; execution falls through the
    # branches below and main still returns 0 -- confirm this is intended.
  if target in ('all', 'flatbuffers'):
    try:
      generate_flatbuffer_binaries()
    except BuildError as error:
      handle_build_error(error)
      return 1
  if target in ('all', 'webp'):
    try:
      generate_webp_textures()
    except BuildError as error:
      handle_build_error(error)
      return 1
  if target == 'clean':
    try:
      clean()
    except OSError as error:
      sys.stderr.write('Error cleaning: %s' % str(error))
      return 1
  return 0
if __name__ == '__main__':
  sys.exit(main(sys.argv))
|
normal
|
{
"blob_id": "4989db28db0f823a54ff0942fbc40fc4640da38f",
"index": 3224,
"step-1": "<mask token>\n\n\nclass FlatbuffersConversionData(object):\n \"\"\"Holds data needed to convert a set of json files to flatbuffer binaries.\n\n Attributes:\n schema: The path to the flatbuffer schema file.\n input_files: A list of input files to convert.\n output_path: The path to the output directory where the converted files will\n be placed.\n \"\"\"\n\n def __init__(self, schema, input_files, output_path):\n \"\"\"Initializes this object's schema, input_files and output_path.\"\"\"\n self.schema = schema\n self.input_files = input_files\n self.output_path = output_path\n\n\n<mask token>\n\n\ndef find_executable(name, paths):\n \"\"\"Searches for a file with named `name` in the given paths and returns it.\"\"\"\n for path in paths:\n full_path = os.path.join(path, name)\n if os.path.isfile(full_path):\n return full_path\n return name\n\n\n<mask token>\n\n\nclass BuildError(Exception):\n \"\"\"Error indicating there was a problem building assets.\"\"\"\n\n def __init__(self, argv, error_code):\n Exception.__init__(self)\n self.argv = argv\n self.error_code = error_code\n\n\ndef run_subprocess(argv):\n process = subprocess.Popen(argv)\n process.wait()\n if process.returncode:\n raise BuildError(argv, process.returncode)\n\n\ndef convert_json_to_flatbuffer_binary(json, schema, out_dir):\n \"\"\"Run the flatbuffer compiler on the given json file and schema.\n\n Args:\n json: The path to the json file to convert to a flatbuffer binary.\n schema: The path to the schema to use in the conversion process.\n out_dir: The directory to write the flatbuffer binary.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [FLATC, '-o', out_dir, '-b', schema, json]\n run_subprocess(command)\n\n\ndef convert_png_image_to_webp(png, out, quality=80):\n \"\"\"Run the webp converter on the given png file.\n\n Args:\n png: The path to the png file to convert into a webp file.\n out: The path of the webp to write to.\n quality: The quality of the 
processed image, where quality is between 0\n (poor) to 100 (very good). Typical value is around 80.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [CWEBP, '-q', str(quality), png, '-o', out]\n run_subprocess(command)\n\n\ndef needs_rebuild(source, target):\n \"\"\"Checks if the source file needs to be rebuilt.\n\n Args:\n source: The source file to be compared.\n target: The target file which we may need to rebuild.\n\n Returns:\n True if the source file is newer than the target, or if the target file does\n not exist.\n \"\"\"\n return not os.path.isfile(target) or os.path.getmtime(source\n ) > os.path.getmtime(target)\n\n\ndef processed_json_path(path):\n \"\"\"Take the path to a raw json asset and convert it to target bin path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')\n\n\n<mask token>\n\n\ndef generate_webp_textures():\n \"\"\"Run the webp converter on off of the png files.\"\"\"\n input_files = PNG_TEXTURES['input_files']\n output_files = PNG_TEXTURES['output_files']\n if not os.path.exists(TEXTURE_PATH):\n os.makedirs(TEXTURE_PATH)\n for png, out in zip(input_files, output_files):\n if needs_rebuild(png, out):\n convert_png_image_to_webp(png, out, WEBP_QUALITY)\n\n\ndef clean_webp_textures():\n \"\"\"Delete all the processed webp textures.\"\"\"\n for webp in PNG_TEXTURES['output_files']:\n if os.path.isfile(webp):\n os.remove(webp)\n\n\n<mask token>\n\n\ndef handle_build_error(error):\n \"\"\"Prints an error message to stderr for BuildErrors.\"\"\"\n sys.stderr.write('Error running command `%s`. Returned %s.\\n' % (' '.\n join(error.argv), str(error.error_code)))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef processed_json_dir(path):\n \"\"\"Take the path to a raw json asset and convert it to target directory.\"\"\"\n return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))\n\n\nclass FlatbuffersConversionData(object):\n \"\"\"Holds data needed to convert a set of json files to flatbuffer binaries.\n\n Attributes:\n schema: The path to the flatbuffer schema file.\n input_files: A list of input files to convert.\n output_path: The path to the output directory where the converted files will\n be placed.\n \"\"\"\n\n def __init__(self, schema, input_files, output_path):\n \"\"\"Initializes this object's schema, input_files and output_path.\"\"\"\n self.schema = schema\n self.input_files = input_files\n self.output_path = output_path\n\n\n<mask token>\n\n\ndef find_executable(name, paths):\n \"\"\"Searches for a file with named `name` in the given paths and returns it.\"\"\"\n for path in paths:\n full_path = os.path.join(path, name)\n if os.path.isfile(full_path):\n return full_path\n return name\n\n\n<mask token>\n\n\nclass BuildError(Exception):\n \"\"\"Error indicating there was a problem building assets.\"\"\"\n\n def __init__(self, argv, error_code):\n Exception.__init__(self)\n self.argv = argv\n self.error_code = error_code\n\n\ndef run_subprocess(argv):\n process = subprocess.Popen(argv)\n process.wait()\n if process.returncode:\n raise BuildError(argv, process.returncode)\n\n\ndef convert_json_to_flatbuffer_binary(json, schema, out_dir):\n \"\"\"Run the flatbuffer compiler on the given json file and schema.\n\n Args:\n json: The path to the json file to convert to a flatbuffer binary.\n schema: The path to the schema to use in the conversion process.\n out_dir: The directory to write the flatbuffer binary.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [FLATC, '-o', out_dir, '-b', schema, json]\n run_subprocess(command)\n\n\ndef convert_png_image_to_webp(png, out, quality=80):\n \"\"\"Run 
the webp converter on the given png file.\n\n Args:\n png: The path to the png file to convert into a webp file.\n out: The path of the webp to write to.\n quality: The quality of the processed image, where quality is between 0\n (poor) to 100 (very good). Typical value is around 80.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [CWEBP, '-q', str(quality), png, '-o', out]\n run_subprocess(command)\n\n\ndef needs_rebuild(source, target):\n \"\"\"Checks if the source file needs to be rebuilt.\n\n Args:\n source: The source file to be compared.\n target: The target file which we may need to rebuild.\n\n Returns:\n True if the source file is newer than the target, or if the target file does\n not exist.\n \"\"\"\n return not os.path.isfile(target) or os.path.getmtime(source\n ) > os.path.getmtime(target)\n\n\ndef processed_json_path(path):\n \"\"\"Take the path to a raw json asset and convert it to target bin path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')\n\n\n<mask token>\n\n\ndef generate_webp_textures():\n \"\"\"Run the webp converter on off of the png files.\"\"\"\n input_files = PNG_TEXTURES['input_files']\n output_files = PNG_TEXTURES['output_files']\n if not os.path.exists(TEXTURE_PATH):\n os.makedirs(TEXTURE_PATH)\n for png, out in zip(input_files, output_files):\n if needs_rebuild(png, out):\n convert_png_image_to_webp(png, out, WEBP_QUALITY)\n\n\ndef clean_webp_textures():\n \"\"\"Delete all the processed webp textures.\"\"\"\n for webp in PNG_TEXTURES['output_files']:\n if os.path.isfile(webp):\n os.remove(webp)\n\n\n<mask token>\n\n\ndef clean():\n \"\"\"Delete all the processed files.\"\"\"\n clean_flatbuffer_binaries()\n clean_webp_textures()\n\n\ndef handle_build_error(error):\n \"\"\"Prints an error message to stderr for BuildErrors.\"\"\"\n sys.stderr.write('Error running command `%s`. 
Returned %s.\\n' % (' '.\n join(error.argv), str(error.error_code)))\n\n\ndef main(argv):\n \"\"\"Builds or cleans the assets needed for the game.\n\n To build all assets, either call this script without any arguments. Or\n alternatively, call it with the argument 'all'. To just convert the flatbuffer\n json files, call it with 'flatbuffers'. Likewise to convert the png files to\n webp files, call it with 'webp'. To clean all converted files, call it with\n 'clean'.\n\n Args:\n argv: The command line argument containing which command to run.\n\n Returns:\n Returns 0 on success.\n \"\"\"\n target = argv[1] if len(argv) >= 2 else 'all'\n if target not in ('all', 'flatbuffers', 'webp', 'clean'):\n sys.stderr.write('No rule to build target %s.\\n' % target)\n if target in ('all', 'flatbuffers'):\n try:\n generate_flatbuffer_binaries()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target in ('all', 'webp'):\n try:\n generate_webp_textures()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target == 'clean':\n try:\n clean()\n except OSError as error:\n sys.stderr.write('Error cleaning: %s' % str(error))\n return 1\n return 0\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef processed_json_dir(path):\n \"\"\"Take the path to a raw json asset and convert it to target directory.\"\"\"\n return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))\n\n\nclass FlatbuffersConversionData(object):\n \"\"\"Holds data needed to convert a set of json files to flatbuffer binaries.\n\n Attributes:\n schema: The path to the flatbuffer schema file.\n input_files: A list of input files to convert.\n output_path: The path to the output directory where the converted files will\n be placed.\n \"\"\"\n\n def __init__(self, schema, input_files, output_path):\n \"\"\"Initializes this object's schema, input_files and output_path.\"\"\"\n self.schema = schema\n self.input_files = input_files\n self.output_path = output_path\n\n\n<mask token>\n\n\ndef processed_texture_path(path):\n \"\"\"Take the path to a raw png asset and convert it to target webp path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('png', 'webp')\n\n\n<mask token>\n\n\ndef find_executable(name, paths):\n \"\"\"Searches for a file with named `name` in the given paths and returns it.\"\"\"\n for path in paths:\n full_path = os.path.join(path, name)\n if os.path.isfile(full_path):\n return full_path\n return name\n\n\n<mask token>\n\n\nclass BuildError(Exception):\n \"\"\"Error indicating there was a problem building assets.\"\"\"\n\n def __init__(self, argv, error_code):\n Exception.__init__(self)\n self.argv = argv\n self.error_code = error_code\n\n\ndef run_subprocess(argv):\n process = subprocess.Popen(argv)\n process.wait()\n if process.returncode:\n raise BuildError(argv, process.returncode)\n\n\ndef convert_json_to_flatbuffer_binary(json, schema, out_dir):\n \"\"\"Run the flatbuffer compiler on the given json file and schema.\n\n Args:\n json: The path to the json file to convert to a flatbuffer binary.\n schema: The path to the schema to use in the conversion process.\n out_dir: The directory to write the flatbuffer binary.\n\n 
Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [FLATC, '-o', out_dir, '-b', schema, json]\n run_subprocess(command)\n\n\ndef convert_png_image_to_webp(png, out, quality=80):\n \"\"\"Run the webp converter on the given png file.\n\n Args:\n png: The path to the png file to convert into a webp file.\n out: The path of the webp to write to.\n quality: The quality of the processed image, where quality is between 0\n (poor) to 100 (very good). Typical value is around 80.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [CWEBP, '-q', str(quality), png, '-o', out]\n run_subprocess(command)\n\n\ndef needs_rebuild(source, target):\n \"\"\"Checks if the source file needs to be rebuilt.\n\n Args:\n source: The source file to be compared.\n target: The target file which we may need to rebuild.\n\n Returns:\n True if the source file is newer than the target, or if the target file does\n not exist.\n \"\"\"\n return not os.path.isfile(target) or os.path.getmtime(source\n ) > os.path.getmtime(target)\n\n\ndef processed_json_path(path):\n \"\"\"Take the path to a raw json asset and convert it to target bin path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')\n\n\ndef generate_flatbuffer_binaries():\n \"\"\"Run the flatbuffer compiler on the all of the flatbuffer json files.\"\"\"\n for element in FLATBUFFERS_CONVERSION_DATA:\n schema = element.schema\n output_path = element.output_path\n if not os.path.exists(output_path):\n os.makedirs(output_path)\n for json in element.input_files:\n target = processed_json_path(json)\n if needs_rebuild(json, target) or needs_rebuild(schema, target):\n convert_json_to_flatbuffer_binary(json, schema, output_path)\n\n\ndef generate_webp_textures():\n \"\"\"Run the webp converter on off of the png files.\"\"\"\n input_files = PNG_TEXTURES['input_files']\n output_files = PNG_TEXTURES['output_files']\n if not os.path.exists(TEXTURE_PATH):\n 
os.makedirs(TEXTURE_PATH)\n for png, out in zip(input_files, output_files):\n if needs_rebuild(png, out):\n convert_png_image_to_webp(png, out, WEBP_QUALITY)\n\n\ndef clean_webp_textures():\n \"\"\"Delete all the processed webp textures.\"\"\"\n for webp in PNG_TEXTURES['output_files']:\n if os.path.isfile(webp):\n os.remove(webp)\n\n\n<mask token>\n\n\ndef clean():\n \"\"\"Delete all the processed files.\"\"\"\n clean_flatbuffer_binaries()\n clean_webp_textures()\n\n\ndef handle_build_error(error):\n \"\"\"Prints an error message to stderr for BuildErrors.\"\"\"\n sys.stderr.write('Error running command `%s`. Returned %s.\\n' % (' '.\n join(error.argv), str(error.error_code)))\n\n\ndef main(argv):\n \"\"\"Builds or cleans the assets needed for the game.\n\n To build all assets, either call this script without any arguments. Or\n alternatively, call it with the argument 'all'. To just convert the flatbuffer\n json files, call it with 'flatbuffers'. Likewise to convert the png files to\n webp files, call it with 'webp'. To clean all converted files, call it with\n 'clean'.\n\n Args:\n argv: The command line argument containing which command to run.\n\n Returns:\n Returns 0 on success.\n \"\"\"\n target = argv[1] if len(argv) >= 2 else 'all'\n if target not in ('all', 'flatbuffers', 'webp', 'clean'):\n sys.stderr.write('No rule to build target %s.\\n' % target)\n if target in ('all', 'flatbuffers'):\n try:\n generate_flatbuffer_binaries()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target in ('all', 'webp'):\n try:\n generate_webp_textures()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target == 'clean':\n try:\n clean()\n except OSError as error:\n sys.stderr.write('Error cleaning: %s' % str(error))\n return 1\n return 0\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef processed_json_dir(path):\n \"\"\"Take the path to a raw json asset and convert it to target directory.\"\"\"\n return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))\n\n\nclass FlatbuffersConversionData(object):\n \"\"\"Holds data needed to convert a set of json files to flatbuffer binaries.\n\n Attributes:\n schema: The path to the flatbuffer schema file.\n input_files: A list of input files to convert.\n output_path: The path to the output directory where the converted files will\n be placed.\n \"\"\"\n\n def __init__(self, schema, input_files, output_path):\n \"\"\"Initializes this object's schema, input_files and output_path.\"\"\"\n self.schema = schema\n self.input_files = input_files\n self.output_path = output_path\n\n\n<mask token>\n\n\ndef processed_texture_path(path):\n \"\"\"Take the path to a raw png asset and convert it to target webp path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('png', 'webp')\n\n\n<mask token>\n\n\ndef find_executable(name, paths):\n \"\"\"Searches for a file with named `name` in the given paths and returns it.\"\"\"\n for path in paths:\n full_path = os.path.join(path, name)\n if os.path.isfile(full_path):\n return full_path\n return name\n\n\n<mask token>\n\n\nclass BuildError(Exception):\n \"\"\"Error indicating there was a problem building assets.\"\"\"\n\n def __init__(self, argv, error_code):\n Exception.__init__(self)\n self.argv = argv\n self.error_code = error_code\n\n\ndef run_subprocess(argv):\n process = subprocess.Popen(argv)\n process.wait()\n if process.returncode:\n raise BuildError(argv, process.returncode)\n\n\ndef convert_json_to_flatbuffer_binary(json, schema, out_dir):\n \"\"\"Run the flatbuffer compiler on the given json file and schema.\n\n Args:\n json: The path to the json file to convert to a flatbuffer binary.\n schema: The path to the schema to use in the conversion process.\n out_dir: The directory to write the flatbuffer binary.\n\n 
Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [FLATC, '-o', out_dir, '-b', schema, json]\n run_subprocess(command)\n\n\ndef convert_png_image_to_webp(png, out, quality=80):\n \"\"\"Run the webp converter on the given png file.\n\n Args:\n png: The path to the png file to convert into a webp file.\n out: The path of the webp to write to.\n quality: The quality of the processed image, where quality is between 0\n (poor) to 100 (very good). Typical value is around 80.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [CWEBP, '-q', str(quality), png, '-o', out]\n run_subprocess(command)\n\n\ndef needs_rebuild(source, target):\n \"\"\"Checks if the source file needs to be rebuilt.\n\n Args:\n source: The source file to be compared.\n target: The target file which we may need to rebuild.\n\n Returns:\n True if the source file is newer than the target, or if the target file does\n not exist.\n \"\"\"\n return not os.path.isfile(target) or os.path.getmtime(source\n ) > os.path.getmtime(target)\n\n\ndef processed_json_path(path):\n \"\"\"Take the path to a raw json asset and convert it to target bin path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')\n\n\ndef generate_flatbuffer_binaries():\n \"\"\"Run the flatbuffer compiler on the all of the flatbuffer json files.\"\"\"\n for element in FLATBUFFERS_CONVERSION_DATA:\n schema = element.schema\n output_path = element.output_path\n if not os.path.exists(output_path):\n os.makedirs(output_path)\n for json in element.input_files:\n target = processed_json_path(json)\n if needs_rebuild(json, target) or needs_rebuild(schema, target):\n convert_json_to_flatbuffer_binary(json, schema, output_path)\n\n\ndef generate_webp_textures():\n \"\"\"Run the webp converter on off of the png files.\"\"\"\n input_files = PNG_TEXTURES['input_files']\n output_files = PNG_TEXTURES['output_files']\n if not os.path.exists(TEXTURE_PATH):\n 
os.makedirs(TEXTURE_PATH)\n for png, out in zip(input_files, output_files):\n if needs_rebuild(png, out):\n convert_png_image_to_webp(png, out, WEBP_QUALITY)\n\n\ndef clean_webp_textures():\n \"\"\"Delete all the processed webp textures.\"\"\"\n for webp in PNG_TEXTURES['output_files']:\n if os.path.isfile(webp):\n os.remove(webp)\n\n\ndef clean_flatbuffer_binaries():\n \"\"\"Delete all the processed flatbuffer binaries.\"\"\"\n for element in FLATBUFFERS_CONVERSION_DATA:\n for json in element.input_files:\n path = processed_json_path(json)\n if os.path.isfile(path):\n os.remove(path)\n\n\ndef clean():\n \"\"\"Delete all the processed files.\"\"\"\n clean_flatbuffer_binaries()\n clean_webp_textures()\n\n\ndef handle_build_error(error):\n \"\"\"Prints an error message to stderr for BuildErrors.\"\"\"\n sys.stderr.write('Error running command `%s`. Returned %s.\\n' % (' '.\n join(error.argv), str(error.error_code)))\n\n\ndef main(argv):\n \"\"\"Builds or cleans the assets needed for the game.\n\n To build all assets, either call this script without any arguments. Or\n alternatively, call it with the argument 'all'. To just convert the flatbuffer\n json files, call it with 'flatbuffers'. Likewise to convert the png files to\n webp files, call it with 'webp'. 
To clean all converted files, call it with\n 'clean'.\n\n Args:\n argv: The command line argument containing which command to run.\n\n Returns:\n Returns 0 on success.\n \"\"\"\n target = argv[1] if len(argv) >= 2 else 'all'\n if target not in ('all', 'flatbuffers', 'webp', 'clean'):\n sys.stderr.write('No rule to build target %s.\\n' % target)\n if target in ('all', 'flatbuffers'):\n try:\n generate_flatbuffer_binaries()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target in ('all', 'webp'):\n try:\n generate_webp_textures()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target == 'clean':\n try:\n clean()\n except OSError as error:\n sys.stderr.write('Error cleaning: %s' % str(error))\n return 1\n return 0\n\n\nif __name__ == '__main__':\n sys.exit(main(sys.argv))\n",
"step-5": "#!/usr/bin/python\n# Copyright 2014 Google Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Builds all assets under src/rawassets/, writing the results to assets/.\n\nFinds the flatbuffer compiler and cwebp tool and then uses them to convert the\nJSON files to flatbuffer binary files and the png files to webp files so that\nthey can be loaded by the game. This script also includes various 'make' style\nrules. If you just want to build the flatbuffer binaries you can pass\n'flatbuffer' as an argument, or if you want to just build the webp files you can\npass 'cwebp' as an argument. 
Additionally, if you would like to clean all\ngenerated files, you can call this script with the argument 'clean'.\n\"\"\"\n\nimport distutils.spawn\nimport glob\nimport os\nimport platform\nimport subprocess\nimport sys\n\n# The project root directory, which is one level up from this script's\n# directory.\nPROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),\n os.path.pardir))\n\nPREBUILTS_ROOT = os.path.abspath(os.path.join(os.path.join(PROJECT_ROOT),\n os.path.pardir, os.path.pardir,\n os.path.pardir, os.path.pardir,\n 'prebuilts'))\n\n# Directories that may contains the FlatBuffers compiler.\nFLATBUFFERS_PATHS = [\n os.path.join(PROJECT_ROOT, 'bin'),\n os.path.join(PROJECT_ROOT, 'bin', 'Release'),\n os.path.join(PROJECT_ROOT, 'bin', 'Debug'),\n]\n\n# Directory that contains the cwebp tool.\nCWEBP_BINARY_IN_PATH = distutils.spawn.find_executable('cwebp')\nCWEBP_PATHS = [\n os.path.join(PROJECT_ROOT, 'bin'),\n os.path.join(PROJECT_ROOT, 'bin', 'Release'),\n os.path.join(PROJECT_ROOT, 'bin', 'Debug'),\n os.path.join(PREBUILTS_ROOT, 'libwebp',\n '%s-x86' % platform.system().lower(),\n 'libwebp-0.4.1-%s-x86-32' % platform.system().lower(), 'bin'),\n os.path.dirname(CWEBP_BINARY_IN_PATH) if CWEBP_BINARY_IN_PATH else '',\n]\n\n# Directory to place processed assets.\nASSETS_PATH = os.path.join(PROJECT_ROOT, 'assets')\n\n# Directory where unprocessed assets can be found.\nRAW_ASSETS_PATH = os.path.join(PROJECT_ROOT, 'src', 'rawassets')\n\n# Directory where processed sound flatbuffer data can be found.\nSOUND_PATH = os.path.join(ASSETS_PATH, 'sounds')\n\n# Directory where unprocessed sound flatbuffer data can be found.\nRAW_SOUND_PATH = os.path.join(RAW_ASSETS_PATH, 'sounds')\n\n# Directory where processed material flatbuffer data can be found.\nMATERIAL_PATH = os.path.join(ASSETS_PATH, 'materials')\n\n# Directory where unprocessed material flatbuffer data can be found.\nRAW_MATERIAL_PATH = os.path.join(RAW_ASSETS_PATH, 'materials')\n\n# Directory 
where processed textures can be found.\nTEXTURE_PATH = os.path.join(ASSETS_PATH, 'textures')\n\n# Directory where unprocessed textures can be found.\nRAW_TEXTURE_PATH = os.path.join(RAW_ASSETS_PATH, 'textures')\n\n# Directory where unprocessed assets can be found.\nSCHEMA_PATH = os.path.join(PROJECT_ROOT, 'src', 'flatbufferschemas')\n\n# Windows uses the .exe extension on executables.\nEXECUTABLE_EXTENSION = '.exe' if platform.system() == 'Windows' else ''\n\n# Name of the flatbuffer executable.\nFLATC_EXECUTABLE_NAME = 'flatc' + EXECUTABLE_EXTENSION\n\n# Name of the cwebp executable.\nCWEBP_EXECUTABLE_NAME = 'cwebp' + EXECUTABLE_EXTENSION\n\n# What level of quality we want to apply to the webp files.\n# Ranges from 0 to 100.\nWEBP_QUALITY = 90\n\n\ndef processed_json_dir(path):\n \"\"\"Take the path to a raw json asset and convert it to target directory.\"\"\"\n return os.path.dirname(path.replace(RAW_ASSETS_PATH, ASSETS_PATH))\n\n\nclass FlatbuffersConversionData(object):\n \"\"\"Holds data needed to convert a set of json files to flatbuffer binaries.\n\n Attributes:\n schema: The path to the flatbuffer schema file.\n input_files: A list of input files to convert.\n output_path: The path to the output directory where the converted files will\n be placed.\n \"\"\"\n\n def __init__(self, schema, input_files, output_path):\n \"\"\"Initializes this object's schema, input_files and output_path.\"\"\"\n self.schema = schema\n self.input_files = input_files\n self.output_path = output_path\n\n\n# A list of json files and their schemas that will be converted to binary files\n# by the flatbuffer compiler.\nFLATBUFFERS_CONVERSION_DATA = [\n FlatbuffersConversionData(\n schema=os.path.join(SCHEMA_PATH, 'config.fbs'),\n input_files=[os.path.join(RAW_ASSETS_PATH, 'config.json')],\n output_path=ASSETS_PATH),\n FlatbuffersConversionData(\n schema=os.path.join(SCHEMA_PATH, 'buses.fbs'),\n input_files=[os.path.join(RAW_ASSETS_PATH, 'buses.json')],\n output_path=ASSETS_PATH),\n 
FlatbuffersConversionData(\n schema=os.path.join(SCHEMA_PATH, 'sound_assets.fbs'),\n input_files=[os.path.join(RAW_ASSETS_PATH, 'sound_assets.json')],\n output_path=ASSETS_PATH),\n FlatbuffersConversionData(\n schema=os.path.join(SCHEMA_PATH, 'character_state_machine_def.fbs'),\n input_files=[os.path.join(RAW_ASSETS_PATH,\n 'character_state_machine_def.json')],\n output_path=ASSETS_PATH),\n FlatbuffersConversionData(\n schema=os.path.join(SCHEMA_PATH, 'sound_collection_def.fbs'),\n input_files=glob.glob(os.path.join(RAW_SOUND_PATH, '*.json')),\n output_path=SOUND_PATH),\n FlatbuffersConversionData(\n schema=os.path.join(SCHEMA_PATH, 'materials.fbs'),\n input_files=glob.glob(os.path.join(RAW_MATERIAL_PATH, '*.json')),\n output_path=MATERIAL_PATH)\n]\n\n\ndef processed_texture_path(path):\n \"\"\"Take the path to a raw png asset and convert it to target webp path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('png', 'webp')\n\n\n# PNG files to convert to webp.\nPNG_TEXTURES = {\n 'input_files': glob.glob(os.path.join(RAW_TEXTURE_PATH, '*.png')),\n 'output_files': [processed_texture_path(png_path)\n for png_path in glob.glob(os.path.join(RAW_TEXTURE_PATH,\n '*.png'))]\n}\n\n\ndef find_executable(name, paths):\n \"\"\"Searches for a file with named `name` in the given paths and returns it.\"\"\"\n for path in paths:\n full_path = os.path.join(path, name)\n if os.path.isfile(full_path):\n return full_path\n # If not found, just assume it's in the PATH.\n return name\n\n\n# Location of FlatBuffers compiler.\nFLATC = find_executable(FLATC_EXECUTABLE_NAME, FLATBUFFERS_PATHS)\n\n# Location of webp compression tool.\nCWEBP = find_executable(CWEBP_EXECUTABLE_NAME, CWEBP_PATHS)\n\n\nclass BuildError(Exception):\n \"\"\"Error indicating there was a problem building assets.\"\"\"\n\n def __init__(self, argv, error_code):\n Exception.__init__(self)\n self.argv = argv\n self.error_code = error_code\n\n\ndef run_subprocess(argv):\n process = 
subprocess.Popen(argv)\n process.wait()\n if process.returncode:\n raise BuildError(argv, process.returncode)\n\n\ndef convert_json_to_flatbuffer_binary(json, schema, out_dir):\n \"\"\"Run the flatbuffer compiler on the given json file and schema.\n\n Args:\n json: The path to the json file to convert to a flatbuffer binary.\n schema: The path to the schema to use in the conversion process.\n out_dir: The directory to write the flatbuffer binary.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [FLATC, '-o', out_dir, '-b', schema, json]\n run_subprocess(command)\n\n\ndef convert_png_image_to_webp(png, out, quality=80):\n \"\"\"Run the webp converter on the given png file.\n\n Args:\n png: The path to the png file to convert into a webp file.\n out: The path of the webp to write to.\n quality: The quality of the processed image, where quality is between 0\n (poor) to 100 (very good). Typical value is around 80.\n\n Raises:\n BuildError: Process return code was nonzero.\n \"\"\"\n command = [CWEBP, '-q', str(quality), png, '-o', out]\n run_subprocess(command)\n\n\ndef needs_rebuild(source, target):\n \"\"\"Checks if the source file needs to be rebuilt.\n\n Args:\n source: The source file to be compared.\n target: The target file which we may need to rebuild.\n\n Returns:\n True if the source file is newer than the target, or if the target file does\n not exist.\n \"\"\"\n return not os.path.isfile(target) or (\n os.path.getmtime(source) > os.path.getmtime(target))\n\n\ndef processed_json_path(path):\n \"\"\"Take the path to a raw json asset and convert it to target bin path.\"\"\"\n return path.replace(RAW_ASSETS_PATH, ASSETS_PATH).replace('.json', '.bin')\n\n\ndef generate_flatbuffer_binaries():\n \"\"\"Run the flatbuffer compiler on the all of the flatbuffer json files.\"\"\"\n for element in FLATBUFFERS_CONVERSION_DATA:\n schema = element.schema\n output_path = element.output_path\n if not os.path.exists(output_path):\n 
os.makedirs(output_path)\n for json in element.input_files:\n target = processed_json_path(json)\n if needs_rebuild(json, target) or needs_rebuild(schema, target):\n convert_json_to_flatbuffer_binary(\n json, schema, output_path)\n\n\ndef generate_webp_textures():\n \"\"\"Run the webp converter on off of the png files.\"\"\"\n input_files = PNG_TEXTURES['input_files']\n output_files = PNG_TEXTURES['output_files']\n if not os.path.exists(TEXTURE_PATH):\n os.makedirs(TEXTURE_PATH)\n for png, out in zip(input_files, output_files):\n if needs_rebuild(png, out):\n convert_png_image_to_webp(png, out, WEBP_QUALITY)\n\n\ndef clean_webp_textures():\n \"\"\"Delete all the processed webp textures.\"\"\"\n for webp in PNG_TEXTURES['output_files']:\n if os.path.isfile(webp):\n os.remove(webp)\n\n\ndef clean_flatbuffer_binaries():\n \"\"\"Delete all the processed flatbuffer binaries.\"\"\"\n for element in FLATBUFFERS_CONVERSION_DATA:\n for json in element.input_files:\n path = processed_json_path(json)\n if os.path.isfile(path):\n os.remove(path)\n\n\ndef clean():\n \"\"\"Delete all the processed files.\"\"\"\n clean_flatbuffer_binaries()\n clean_webp_textures()\n\n\ndef handle_build_error(error):\n \"\"\"Prints an error message to stderr for BuildErrors.\"\"\"\n sys.stderr.write('Error running command `%s`. Returned %s.\\n' % (\n ' '.join(error.argv), str(error.error_code)))\n\n\ndef main(argv):\n \"\"\"Builds or cleans the assets needed for the game.\n\n To build all assets, either call this script without any arguments. Or\n alternatively, call it with the argument 'all'. To just convert the flatbuffer\n json files, call it with 'flatbuffers'. Likewise to convert the png files to\n webp files, call it with 'webp'. 
To clean all converted files, call it with\n 'clean'.\n\n Args:\n argv: The command line argument containing which command to run.\n\n Returns:\n Returns 0 on success.\n \"\"\"\n target = argv[1] if len(argv) >= 2 else 'all'\n if target not in ('all', 'flatbuffers', 'webp', 'clean'):\n sys.stderr.write('No rule to build target %s.\\n' % target)\n\n if target in ('all', 'flatbuffers'):\n try:\n generate_flatbuffer_binaries()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target in ('all', 'webp'):\n try:\n generate_webp_textures()\n except BuildError as error:\n handle_build_error(error)\n return 1\n if target == 'clean':\n try:\n clean()\n except OSError as error:\n sys.stderr.write('Error cleaning: %s' % str(error))\n return 1\n\n return 0\n\n\nif __name__ == '__main__':\n sys.exit(main(sys.argv))\n\n",
"step-ids": [
15,
18,
20,
22,
25
]
}
|
[
15,
18,
20,
22,
25
] |
'''
@name: ros_env_img.py
@brief: This (abstract) class is a simulation environment wrapper for
the X-Image Representation.
@author: Ronja Gueldenring
@version: 3.5
@date: 2019/04/05
'''
# python relevant
import numpy as np
# custom classes
from rl_agent.env_wrapper.ros_env import RosEnvAbs
# ros-relevant
import rospy
class RosEnvImg(RosEnvAbs):
    """
    Simulation environment wrapper for the X-Image state representation.

    Configures the state collector for image mode (mode 0) and exposes the
    laser scan / waypoint information to the RL agent as a single-channel
    image of shape ``STATE_SIZE``.
    """
    def __init__(self, ns, state_collector, execution_mode, task_mode, state_size, observation_space, stack_offset, action_size, action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):
        # Mode 0 selects the image-based state representation in the collector.
        state_collector.set_state_mode(0)
        super(RosEnvImg, self).__init__(ns, state_collector, execution_mode, task_mode, state_size, observation_space, stack_offset, action_size, action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc)

    def get_observation_(self):
        """
        Function returns state that will be fed to the rl-agent
        It includes
        the laserscan and the waypoint information stored in an image.
        :return: state array of shape ``self.STATE_SIZE`` (H, W, C), channel 0 filled.
        """
        # np.float was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin float yields the same float64 dtype.
        obs = np.zeros(self.STATE_SIZE, dtype=float)
        # Flat occupancy-grid data reshaped to the (H, W) image plane.
        obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.STATE_SIZE[0:2])

        if self.debug_:
            self.debugger_.show_input_occ_grid(self.input_img_)
            self.debugger_.show_input_image(obs[:, :, 0])
        return obs
|
normal
|
{
"blob_id": "1a979933eb02e9d12dc034021448cbade59abc48",
"index": 2585,
"step-1": "<mask token>\n\n\nclass RosEnvImg(RosEnvAbs):\n <mask token>\n <mask token>\n\n def get_observation_(self):\n \"\"\"\n Function returns state that will be fed to the rl-agent\n It includes\n the laserscan and the waypoint information stored in an image.\n :return: state\n \"\"\"\n obs = np.zeros(self.STATE_SIZE, dtype=np.float)\n obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.\n STATE_SIZE[0:2])\n if self.debug_:\n self.debugger_.show_input_occ_grid(self.input_img_)\n self.debugger_.show_input_image(obs[:, :, 0])\n return obs\n",
"step-2": "<mask token>\n\n\nclass RosEnvImg(RosEnvAbs):\n <mask token>\n\n def __init__(self, ns, state_collector, execution_mode, task_mode,\n state_size, observation_space, stack_offset, action_size,\n action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):\n state_collector.set_state_mode(0)\n super(RosEnvImg, self).__init__(ns, state_collector, execution_mode,\n task_mode, state_size, observation_space, stack_offset,\n action_size, action_space, debug, goal_radius, wp_radius,\n robot_radius, reward_fnc)\n\n def get_observation_(self):\n \"\"\"\n Function returns state that will be fed to the rl-agent\n It includes\n the laserscan and the waypoint information stored in an image.\n :return: state\n \"\"\"\n obs = np.zeros(self.STATE_SIZE, dtype=np.float)\n obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.\n STATE_SIZE[0:2])\n if self.debug_:\n self.debugger_.show_input_occ_grid(self.input_img_)\n self.debugger_.show_input_image(obs[:, :, 0])\n return obs\n",
"step-3": "<mask token>\n\n\nclass RosEnvImg(RosEnvAbs):\n \"\"\"\n This (abstract) class is a simulation environment wrapper for\n the X-Image Representation.\n \"\"\"\n\n def __init__(self, ns, state_collector, execution_mode, task_mode,\n state_size, observation_space, stack_offset, action_size,\n action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):\n state_collector.set_state_mode(0)\n super(RosEnvImg, self).__init__(ns, state_collector, execution_mode,\n task_mode, state_size, observation_space, stack_offset,\n action_size, action_space, debug, goal_radius, wp_radius,\n robot_radius, reward_fnc)\n\n def get_observation_(self):\n \"\"\"\n Function returns state that will be fed to the rl-agent\n It includes\n the laserscan and the waypoint information stored in an image.\n :return: state\n \"\"\"\n obs = np.zeros(self.STATE_SIZE, dtype=np.float)\n obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.\n STATE_SIZE[0:2])\n if self.debug_:\n self.debugger_.show_input_occ_grid(self.input_img_)\n self.debugger_.show_input_image(obs[:, :, 0])\n return obs\n",
"step-4": "<mask token>\nimport numpy as np\nfrom rl_agent.env_wrapper.ros_env import RosEnvAbs\nimport rospy\n\n\nclass RosEnvImg(RosEnvAbs):\n \"\"\"\n This (abstract) class is a simulation environment wrapper for\n the X-Image Representation.\n \"\"\"\n\n def __init__(self, ns, state_collector, execution_mode, task_mode,\n state_size, observation_space, stack_offset, action_size,\n action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):\n state_collector.set_state_mode(0)\n super(RosEnvImg, self).__init__(ns, state_collector, execution_mode,\n task_mode, state_size, observation_space, stack_offset,\n action_size, action_space, debug, goal_radius, wp_radius,\n robot_radius, reward_fnc)\n\n def get_observation_(self):\n \"\"\"\n Function returns state that will be fed to the rl-agent\n It includes\n the laserscan and the waypoint information stored in an image.\n :return: state\n \"\"\"\n obs = np.zeros(self.STATE_SIZE, dtype=np.float)\n obs[:, :, 0] = np.array(self.input_img_.data).reshape(self.\n STATE_SIZE[0:2])\n if self.debug_:\n self.debugger_.show_input_occ_grid(self.input_img_)\n self.debugger_.show_input_image(obs[:, :, 0])\n return obs\n",
"step-5": "'''\n @name: ros_env_img.py\n @brief: This (abstract) class is a simulation environment wrapper for\n the X-Image Representation.\n @author: Ronja Gueldenring\n @version: 3.5\n @date: 2019/04/05\n'''\n\n\n# python relevant\nimport numpy as np\n\n# custom classes\nfrom rl_agent.env_wrapper.ros_env import RosEnvAbs\n\n# ros-relevant\nimport rospy\n\nclass RosEnvImg(RosEnvAbs):\n '''\n This (abstract) class is a simulation environment wrapper for\n the X-Image Representation.\n '''\n def __init__(self, ns, state_collector, execution_mode, task_mode, state_size, observation_space, stack_offset, action_size, action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc):\n state_collector.set_state_mode(0)\n super(RosEnvImg, self).__init__(ns, state_collector, execution_mode, task_mode, state_size, observation_space, stack_offset, action_size, action_space, debug, goal_radius, wp_radius, robot_radius, reward_fnc)\n\n\n def get_observation_(self):\n \"\"\"\n Function returns state that will be fed to the rl-agent\n It includes\n the laserscan and the waypoint information stored in an image.\n :return: state\n \"\"\"\n obs = np.zeros(self.STATE_SIZE, dtype=np.float)\n obs[:,:,0] = np.array(self.input_img_.data).reshape((self.STATE_SIZE[0:2]))\n\n if self.debug_:\n self.debugger_.show_input_occ_grid(self.input_img_)\n self.debugger_.show_input_image(obs[:,:,0])\n return obs\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from rest_framework import serializers
from .models import *
class MovieSerializer(serializers.Serializer):
    """Validates a payload carrying a list of movie name strings."""
    # Accepts a JSON array of strings, e.g. ["Alien", "Heat"].
    movie_name = serializers.ListField(child=serializers.CharField())
class FilmSerializer(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Movie model."""
    class Meta:
        model = Movie
        # '__all__' mirrors the full model; narrow this list if some
        # fields should not be exposed through the API.
        fields = '__all__'
|
normal
|
{
"blob_id": "0509afdce0d28cc04f4452472881fe9c5e4fbcc4",
"index": 7825,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass MovieSerializer(serializers.Serializer):\n <mask token>\n\n\nclass FilmSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Movie\n fields = '__all__'\n",
"step-3": "<mask token>\n\n\nclass MovieSerializer(serializers.Serializer):\n movie_name = serializers.ListField(child=serializers.CharField())\n\n\nclass FilmSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Movie\n fields = '__all__'\n",
"step-4": "from rest_framework import serializers\nfrom .models import *\n\n\nclass MovieSerializer(serializers.Serializer):\n movie_name = serializers.ListField(child=serializers.CharField())\n\n\nclass FilmSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Movie\n fields = '__all__'\n",
"step-5": null,
"step-ids": [
0,
2,
3,
4
]
}
|
[
0,
2,
3,
4
] |
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired
from flask_wtf import FlaskForm
# ...
class LoginForm(FlaskForm):
    """Login form with required username and password fields."""
    # Field labels are user-facing Chinese strings ("username" / "password");
    # they are rendered in templates and must stay as-is.
    username = StringField('用户名', validators=[DataRequired()])
    password = PasswordField('密码', validators=[DataRequired()])
|
normal
|
{
"blob_id": "6ad2014191215dac97ad6fc6a026512c3d1866dc",
"index": 8244,
"step-1": "<mask token>\n\n\nclass LoginForm(FlaskForm):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass LoginForm(FlaskForm):\n <mask token>\n username = StringField('用户名', validators=[DataRequired()])\n password = PasswordField('密码', validators=[DataRequired()])\n",
"step-3": "<mask token>\n\n\nclass LoginForm(FlaskForm):\n \"\"\"登录表单类\"\"\"\n username = StringField('用户名', validators=[DataRequired()])\n password = PasswordField('密码', validators=[DataRequired()])\n",
"step-4": "from wtforms import StringField, PasswordField\nfrom wtforms.validators import DataRequired\nfrom flask_wtf import FlaskForm\n\n\nclass LoginForm(FlaskForm):\n \"\"\"登录表单类\"\"\"\n username = StringField('用户名', validators=[DataRequired()])\n password = PasswordField('密码', validators=[DataRequired()])\n",
"step-5": "from wtforms import StringField, PasswordField\nfrom wtforms.validators import DataRequired\nfrom flask_wtf import FlaskForm\n\n\n# ...\nclass LoginForm(FlaskForm):\n \"\"\"登录表单类\"\"\"\n username = StringField('用户名', validators=[DataRequired()])\n password = PasswordField('密码', validators=[DataRequired()])",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class MigrationVisitor(semtk.DefaultSemTKVisitor):
def __init__(self, data: RemoveIsATypeOf):
self.data = data
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@dataclass
class RemoveIsATypeOf(OntologyChange):
<|reserved_special_token_0|>
name_space: NameSpace
class_id: str
property_id: str
range_id: str
def text_description(self) ->str:
prop = stylize_property(self.property_id)
range_str = stylize_property(self.range_id)
return f'Property {prop} used to be a type of {range_str}, no longer.'
def migrate_json(self, json: semtk.SemTKJSON) ->None:
json.accept(MigrationVisitor(self))
class MigrationVisitor(semtk.DefaultSemTKVisitor):
def __init__(self, data: RemoveIsATypeOf):
self.data = data
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@dataclass
class RemoveIsATypeOf(OntologyChange):
"""
Represents an ontology change where:
property_id is a type of from_property_id.
has been removed.
"""
name_space: NameSpace
class_id: str
property_id: str
range_id: str
def text_description(self) ->str:
prop = stylize_property(self.property_id)
range_str = stylize_property(self.range_id)
return f'Property {prop} used to be a type of {range_str}, no longer.'
def migrate_json(self, json: semtk.SemTKJSON) ->None:
json.accept(MigrationVisitor(self))
class MigrationVisitor(semtk.DefaultSemTKVisitor):
def __init__(self, data: RemoveIsATypeOf):
self.data = data
<|reserved_special_token_1|>
from dataclasses import dataclass
import semtk
from migration_helpers.name_space import NameSpace, get_uri
from ontology_changes.ontology_change import stylize_property, OntologyChange
@dataclass
class RemoveIsATypeOf(OntologyChange):
"""
Represents an ontology change where:
property_id is a type of from_property_id.
has been removed.
"""
name_space: NameSpace
class_id: str
property_id: str
range_id: str
def text_description(self) ->str:
prop = stylize_property(self.property_id)
range_str = stylize_property(self.range_id)
return f'Property {prop} used to be a type of {range_str}, no longer.'
def migrate_json(self, json: semtk.SemTKJSON) ->None:
json.accept(MigrationVisitor(self))
class MigrationVisitor(semtk.DefaultSemTKVisitor):
def __init__(self, data: RemoveIsATypeOf):
self.data = data
<|reserved_special_token_1|>
# Copyright (c) 2020, Galois, Inc.
#
# All Rights Reserved
#
# This material is based upon work supported by the Defense Advanced Research
# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203.
#
# Any opinions, findings and conclusions or recommendations expressed in this
# material are those of the author(s) and do not necessarily reflect the views
# of the Defense Advanced Research Projects Agency (DARPA).
from dataclasses import dataclass
import semtk
from migration_helpers.name_space import NameSpace, get_uri
from ontology_changes.ontology_change import stylize_property, OntologyChange
@dataclass
class RemoveIsATypeOf(OntologyChange):
"""
Represents an ontology change where:
property_id is a type of from_property_id.
has been removed.
"""
name_space: NameSpace
class_id: str
property_id: str
range_id: str
def text_description(self) -> str:
prop = stylize_property(self.property_id)
range_str = stylize_property(self.range_id)
return f"Property {prop} used to be a type of {range_str}, no longer."
def migrate_json(self, json: semtk.SemTKJSON) -> None:
json.accept(MigrationVisitor(self))
class MigrationVisitor(semtk.DefaultSemTKVisitor):
def __init__(self, data: RemoveIsATypeOf):
self.data = data
# TODO?
|
flexible
|
{
"blob_id": "41294c803cf42611fa003f21b74a49dd5576a8e8",
"index": 5973,
"step-1": "<mask token>\n\n\nclass MigrationVisitor(semtk.DefaultSemTKVisitor):\n\n def __init__(self, data: RemoveIsATypeOf):\n self.data = data\n",
"step-2": "<mask token>\n\n\n@dataclass\nclass RemoveIsATypeOf(OntologyChange):\n <mask token>\n name_space: NameSpace\n class_id: str\n property_id: str\n range_id: str\n\n def text_description(self) ->str:\n prop = stylize_property(self.property_id)\n range_str = stylize_property(self.range_id)\n return f'Property {prop} used to be a type of {range_str}, no longer.'\n\n def migrate_json(self, json: semtk.SemTKJSON) ->None:\n json.accept(MigrationVisitor(self))\n\n\nclass MigrationVisitor(semtk.DefaultSemTKVisitor):\n\n def __init__(self, data: RemoveIsATypeOf):\n self.data = data\n",
"step-3": "<mask token>\n\n\n@dataclass\nclass RemoveIsATypeOf(OntologyChange):\n \"\"\"\n Represents an ontology change where:\n\n property_id is a type of from_property_id.\n\n has been removed.\n \"\"\"\n name_space: NameSpace\n class_id: str\n property_id: str\n range_id: str\n\n def text_description(self) ->str:\n prop = stylize_property(self.property_id)\n range_str = stylize_property(self.range_id)\n return f'Property {prop} used to be a type of {range_str}, no longer.'\n\n def migrate_json(self, json: semtk.SemTKJSON) ->None:\n json.accept(MigrationVisitor(self))\n\n\nclass MigrationVisitor(semtk.DefaultSemTKVisitor):\n\n def __init__(self, data: RemoveIsATypeOf):\n self.data = data\n",
"step-4": "from dataclasses import dataclass\nimport semtk\nfrom migration_helpers.name_space import NameSpace, get_uri\nfrom ontology_changes.ontology_change import stylize_property, OntologyChange\n\n\n@dataclass\nclass RemoveIsATypeOf(OntologyChange):\n \"\"\"\n Represents an ontology change where:\n\n property_id is a type of from_property_id.\n\n has been removed.\n \"\"\"\n name_space: NameSpace\n class_id: str\n property_id: str\n range_id: str\n\n def text_description(self) ->str:\n prop = stylize_property(self.property_id)\n range_str = stylize_property(self.range_id)\n return f'Property {prop} used to be a type of {range_str}, no longer.'\n\n def migrate_json(self, json: semtk.SemTKJSON) ->None:\n json.accept(MigrationVisitor(self))\n\n\nclass MigrationVisitor(semtk.DefaultSemTKVisitor):\n\n def __init__(self, data: RemoveIsATypeOf):\n self.data = data\n",
"step-5": "# Copyright (c) 2020, Galois, Inc.\n#\n# All Rights Reserved\n#\n# This material is based upon work supported by the Defense Advanced Research\n# Projects Agency (DARPA) under Contract No. FA8750-20-C-0203.\n#\n# Any opinions, findings and conclusions or recommendations expressed in this\n# material are those of the author(s) and do not necessarily reflect the views\n# of the Defense Advanced Research Projects Agency (DARPA).\n\nfrom dataclasses import dataclass\n\nimport semtk\n\nfrom migration_helpers.name_space import NameSpace, get_uri\nfrom ontology_changes.ontology_change import stylize_property, OntologyChange\n\n\n@dataclass\nclass RemoveIsATypeOf(OntologyChange):\n \"\"\"\n Represents an ontology change where:\n\n property_id is a type of from_property_id.\n\n has been removed.\n \"\"\"\n\n name_space: NameSpace\n class_id: str\n property_id: str\n range_id: str\n\n def text_description(self) -> str:\n prop = stylize_property(self.property_id)\n range_str = stylize_property(self.range_id)\n return f\"Property {prop} used to be a type of {range_str}, no longer.\"\n\n def migrate_json(self, json: semtk.SemTKJSON) -> None:\n json.accept(MigrationVisitor(self))\n\n\nclass MigrationVisitor(semtk.DefaultSemTKVisitor):\n def __init__(self, data: RemoveIsATypeOf):\n self.data = data\n\n # TODO?\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
# Generated by Django 2.0.2 on 2018-06-10 18:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Expression',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.FloatField()),
],
),
migrations.CreateModel(
name='Gene',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('gene_id', models.CharField(max_length=20, unique=True)),
('summary', models.CharField(max_length=10000)),
],
),
migrations.CreateModel(
name='MutualInformation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.FloatField()),
('dataset', models.CharField(max_length=1000)),
('gene1', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gene1', to='plots.Gene')),
('gene2', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gene2', to='plots.Gene')),
],
),
migrations.CreateModel(
name='Pca',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pc1', models.FloatField()),
('pc2', models.FloatField()),
],
),
migrations.CreateModel(
name='Sample',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=1000)),
('cell_type', models.CharField(max_length=100)),
('dataset', models.CharField(max_length=1000)),
],
),
migrations.AddField(
model_name='pca',
name='sample',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='plots.Sample'),
),
migrations.AddField(
model_name='expression',
name='gene',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='plots.Gene'),
),
migrations.AddField(
model_name='expression',
name='sample',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='plots.Sample'),
),
]
|
normal
|
{
"blob_id": "87e0b9dc518d439f71e261d5c5047153324919ba",
"index": 9547,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Expression', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('value', models.FloatField())]),\n migrations.CreateModel(name='Gene', fields=[('id', models.AutoField\n (auto_created=True, primary_key=True, serialize=False, verbose_name\n ='ID')), ('gene_id', models.CharField(max_length=20, unique=True)),\n ('summary', models.CharField(max_length=10000))]), migrations.\n CreateModel(name='MutualInformation', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('value', models.FloatField()), ('dataset',\n models.CharField(max_length=1000)), ('gene1', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, related_name='gene1',\n to='plots.Gene')), ('gene2', models.ForeignKey(on_delete=django.db.\n models.deletion.CASCADE, related_name='gene2', to='plots.Gene'))]),\n migrations.CreateModel(name='Pca', fields=[('id', models.AutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('pc1', models.FloatField()), ('pc2', models.FloatField())]\n ), migrations.CreateModel(name='Sample', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('name', models.CharField(max_length=1000)), (\n 'cell_type', models.CharField(max_length=100)), ('dataset', models.\n CharField(max_length=1000))]), migrations.AddField(model_name='pca',\n name='sample', field=models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='plots.Sample')), migrations.AddField(\n model_name='expression', name='gene', field=models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to='plots.Gene')),\n migrations.AddField(model_name='expression', name='sample', field=\n 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'plots.Sample'))]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Expression', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('value', models.FloatField())]),\n migrations.CreateModel(name='Gene', fields=[('id', models.AutoField\n (auto_created=True, primary_key=True, serialize=False, verbose_name\n ='ID')), ('gene_id', models.CharField(max_length=20, unique=True)),\n ('summary', models.CharField(max_length=10000))]), migrations.\n CreateModel(name='MutualInformation', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('value', models.FloatField()), ('dataset',\n models.CharField(max_length=1000)), ('gene1', models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, related_name='gene1',\n to='plots.Gene')), ('gene2', models.ForeignKey(on_delete=django.db.\n models.deletion.CASCADE, related_name='gene2', to='plots.Gene'))]),\n migrations.CreateModel(name='Pca', fields=[('id', models.AutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('pc1', models.FloatField()), ('pc2', models.FloatField())]\n ), migrations.CreateModel(name='Sample', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('name', models.CharField(max_length=1000)), (\n 'cell_type', models.CharField(max_length=100)), ('dataset', models.\n CharField(max_length=1000))]), migrations.AddField(model_name='pca',\n name='sample', field=models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='plots.Sample')), migrations.AddField(\n model_name='expression', name='gene', field=models.ForeignKey(\n on_delete=django.db.models.deletion.CASCADE, to='plots.Gene')),\n migrations.AddField(model_name='expression', name='sample', 
field=\n models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=\n 'plots.Sample'))]\n",
"step-5": "# Generated by Django 2.0.2 on 2018-06-10 18:24\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Expression',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('value', models.FloatField()),\n ],\n ),\n migrations.CreateModel(\n name='Gene',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('gene_id', models.CharField(max_length=20, unique=True)),\n ('summary', models.CharField(max_length=10000)),\n ],\n ),\n migrations.CreateModel(\n name='MutualInformation',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('value', models.FloatField()),\n ('dataset', models.CharField(max_length=1000)),\n ('gene1', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gene1', to='plots.Gene')),\n ('gene2', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gene2', to='plots.Gene')),\n ],\n ),\n migrations.CreateModel(\n name='Pca',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('pc1', models.FloatField()),\n ('pc2', models.FloatField()),\n ],\n ),\n migrations.CreateModel(\n name='Sample',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=1000)),\n ('cell_type', models.CharField(max_length=100)),\n ('dataset', models.CharField(max_length=1000)),\n ],\n ),\n migrations.AddField(\n model_name='pca',\n name='sample',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='plots.Sample'),\n ),\n migrations.AddField(\n model_name='expression',\n name='gene',\n 
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='plots.Gene'),\n ),\n migrations.AddField(\n model_name='expression',\n name='sample',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='plots.Sample'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.apps import AppConfig
class CheckoutConfig(AppConfig):
name = "checkout"
# Override the ready method and import the signals module
# so that update_on_save and update_on_delete will be called
# after an OrderLineItem model instance is saved or deleted
def ready(self):
import checkout.signals
|
normal
|
{
"blob_id": "74e3f4cd7b09d9b96feb3f927a509b113481eaed",
"index": 7575,
"step-1": "<mask token>\n\n\nclass CheckoutConfig(AppConfig):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass CheckoutConfig(AppConfig):\n <mask token>\n\n def ready(self):\n import checkout.signals\n",
"step-3": "<mask token>\n\n\nclass CheckoutConfig(AppConfig):\n name = 'checkout'\n\n def ready(self):\n import checkout.signals\n",
"step-4": "from django.apps import AppConfig\n\n\nclass CheckoutConfig(AppConfig):\n name = 'checkout'\n\n def ready(self):\n import checkout.signals\n",
"step-5": "from django.apps import AppConfig\n\n\nclass CheckoutConfig(AppConfig):\n name = \"checkout\"\n\n # Override the ready method and import the signals module\n # so that update_on_save and update_on_delete will be called\n # after an OrderLineItem model instance is saved or deleted\n def ready(self):\n import checkout.signals\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# uncompyle6 version 3.2.3
# Python bytecode 3.6 (3379)
# Decompiled from: Python 2.7.5 (default, Jul 13 2018, 13:06:57)
# [GCC 4.8.5 20150623 (Red Hat 4.8.5-28)]
# Embedded file name: ./authx/migrations/0001_initial.py
# Compiled at: 2018-08-23 19:33:14
# Size of source mod 2**32: 2715 bytes
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion, uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length')]
operations = [
migrations.CreateModel(name='User',
fields=[
(
'password', models.CharField(max_length=128, verbose_name='password')),
(
'last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
(
'is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
(
'id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
(
'created_at', models.DateTimeField(auto_now_add=True)),
(
'updated_at', models.DateTimeField(auto_now=True)),
(
'username', models.CharField(max_length=11, unique=True, verbose_name='')),
(
'fullname', models.CharField(blank=True, max_length=80, verbose_name='')),
(
'thumbnail', models.ImageField(blank=True, null=True, upload_to='thumbnail', verbose_name='')),
(
'is_active', models.BooleanField(default=True)),
(
'is_admin', models.BooleanField(default=False)),
(
'is_staff', models.BooleanField(default=False)),
(
'phone_number', models.CharField(blank=True, max_length=30, null=True)),
(
'email', models.CharField(blank=True, max_length=30, null=True)),
(
'groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
(
'owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
(
'user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'))],
options={'verbose_name':'',
'verbose_name_plural':'',
'db_table':'auth_user',
'permissions':(('view_user', 'Can drive'), )})]
# okay decompiling ./restful/hawkeye/authx/migrations/0001_initial.pyc
|
normal
|
{
"blob_id": "1073845131afb2446ca68ee10092eeb00feef800",
"index": 3585,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('auth', '0008_alter_user_username_max_length')]\n operations = [migrations.CreateModel(name='User', fields=[('password',\n models.CharField(max_length=128, verbose_name='password')), (\n 'last_login', models.DateTimeField(blank=True, null=True,\n verbose_name='last login')), ('is_superuser', models.BooleanField(\n default=False, help_text=\n 'Designates that this user has all permissions without explicitly assigning them.'\n , verbose_name='superuser status')), ('id', models.UUIDField(\n default=uuid.uuid4, editable=False, primary_key=True, serialize=\n False)), ('created_at', models.DateTimeField(auto_now_add=True)), (\n 'updated_at', models.DateTimeField(auto_now=True)), ('username',\n models.CharField(max_length=11, unique=True, verbose_name='')), (\n 'fullname', models.CharField(blank=True, max_length=80,\n verbose_name='')), ('thumbnail', models.ImageField(blank=True, null\n =True, upload_to='thumbnail', verbose_name='')), ('is_active',\n models.BooleanField(default=True)), ('is_admin', models.\n BooleanField(default=False)), ('is_staff', models.BooleanField(\n default=False)), ('phone_number', models.CharField(blank=True,\n max_length=30, null=True)), ('email', models.CharField(blank=True,\n max_length=30, null=True)), ('groups', models.ManyToManyField(blank\n =True, help_text=\n 'The groups this user belongs to. 
A user will get all permissions granted to each of their groups.'\n , related_name='user_set', related_query_name='user', to=\n 'auth.Group', verbose_name='groups')), ('owner', models.ForeignKey(\n blank=True, null=True, on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL)), ('user_permissions', models.\n ManyToManyField(blank=True, help_text=\n 'Specific permissions for this user.', related_name='user_set',\n related_query_name='user', to='auth.Permission', verbose_name=\n 'user permissions'))], options={'verbose_name': '',\n 'verbose_name_plural': '', 'db_table': 'auth_user', 'permissions':\n (('view_user', 'Can drive'),)})]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion, uuid\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('auth', '0008_alter_user_username_max_length')]\n operations = [migrations.CreateModel(name='User', fields=[('password',\n models.CharField(max_length=128, verbose_name='password')), (\n 'last_login', models.DateTimeField(blank=True, null=True,\n verbose_name='last login')), ('is_superuser', models.BooleanField(\n default=False, help_text=\n 'Designates that this user has all permissions without explicitly assigning them.'\n , verbose_name='superuser status')), ('id', models.UUIDField(\n default=uuid.uuid4, editable=False, primary_key=True, serialize=\n False)), ('created_at', models.DateTimeField(auto_now_add=True)), (\n 'updated_at', models.DateTimeField(auto_now=True)), ('username',\n models.CharField(max_length=11, unique=True, verbose_name='')), (\n 'fullname', models.CharField(blank=True, max_length=80,\n verbose_name='')), ('thumbnail', models.ImageField(blank=True, null\n =True, upload_to='thumbnail', verbose_name='')), ('is_active',\n models.BooleanField(default=True)), ('is_admin', models.\n BooleanField(default=False)), ('is_staff', models.BooleanField(\n default=False)), ('phone_number', models.CharField(blank=True,\n max_length=30, null=True)), ('email', models.CharField(blank=True,\n max_length=30, null=True)), ('groups', models.ManyToManyField(blank\n =True, help_text=\n 'The groups this user belongs to. 
A user will get all permissions granted to each of their groups.'\n , related_name='user_set', related_query_name='user', to=\n 'auth.Group', verbose_name='groups')), ('owner', models.ForeignKey(\n blank=True, null=True, on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL)), ('user_permissions', models.\n ManyToManyField(blank=True, help_text=\n 'Specific permissions for this user.', related_name='user_set',\n related_query_name='user', to='auth.Permission', verbose_name=\n 'user permissions'))], options={'verbose_name': '',\n 'verbose_name_plural': '', 'db_table': 'auth_user', 'permissions':\n (('view_user', 'Can drive'),)})]\n",
"step-5": "# uncompyle6 version 3.2.3\n# Python bytecode 3.6 (3379)\n# Decompiled from: Python 2.7.5 (default, Jul 13 2018, 13:06:57) \n# [GCC 4.8.5 20150623 (Red Hat 4.8.5-28)]\n# Embedded file name: ./authx/migrations/0001_initial.py\n# Compiled at: 2018-08-23 19:33:14\n# Size of source mod 2**32: 2715 bytes\nfrom __future__ import unicode_literals\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion, uuid\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [\n ('auth', '0008_alter_user_username_max_length')]\n operations = [\n migrations.CreateModel(name='User',\n fields=[\n (\n 'password', models.CharField(max_length=128, verbose_name='password')),\n (\n 'last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),\n (\n 'is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),\n (\n 'id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),\n (\n 'created_at', models.DateTimeField(auto_now_add=True)),\n (\n 'updated_at', models.DateTimeField(auto_now=True)),\n (\n 'username', models.CharField(max_length=11, unique=True, verbose_name='')),\n (\n 'fullname', models.CharField(blank=True, max_length=80, verbose_name='')),\n (\n 'thumbnail', models.ImageField(blank=True, null=True, upload_to='thumbnail', verbose_name='')),\n (\n 'is_active', models.BooleanField(default=True)),\n (\n 'is_admin', models.BooleanField(default=False)),\n (\n 'is_staff', models.BooleanField(default=False)),\n (\n 'phone_number', models.CharField(blank=True, max_length=30, null=True)),\n (\n 'email', models.CharField(blank=True, max_length=30, null=True)),\n (\n 'groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),\n (\n 'owner', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n (\n 'user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'))],\n options={'verbose_name':'', \n 'verbose_name_plural':'', \n 'db_table':'auth_user', \n 'permissions':(('view_user', 'Can drive'), )})]\n# okay decompiling ./restful/hawkeye/authx/migrations/0001_initial.pyc\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
t_dim_2 = [[1, 2], [3, 4]]
def z(i, j, dim):
t = dim ** 2
if dim == 2:
return t_dim_2[i-1][j-1]
d = dim//2
if i <= d: # I or II
if j <= d:
return z(i, j, d) #I
else:
j -= d
return t//4 + z(i, j, d) # II
else: # III or IV
if j <=d:
i -= d
return t//2 + z(i, j, d) # III
else:
i -= d
j -= d
return 3*t//4 + z(i, j, d) # IV
n = 2
i = 3
j = 3
dim = 2**n
print(z(i,j,dim))
|
normal
|
{
"blob_id": "07ed8c12e8e5c568c897b6b632c48831267eba51",
"index": 1815,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef z(i, j, dim):\n t = dim ** 2\n if dim == 2:\n return t_dim_2[i - 1][j - 1]\n d = dim // 2\n if i <= d:\n if j <= d:\n return z(i, j, d)\n else:\n j -= d\n return t // 4 + z(i, j, d)\n elif j <= d:\n i -= d\n return t // 2 + z(i, j, d)\n else:\n i -= d\n j -= d\n return 3 * t // 4 + z(i, j, d)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef z(i, j, dim):\n t = dim ** 2\n if dim == 2:\n return t_dim_2[i - 1][j - 1]\n d = dim // 2\n if i <= d:\n if j <= d:\n return z(i, j, d)\n else:\n j -= d\n return t // 4 + z(i, j, d)\n elif j <= d:\n i -= d\n return t // 2 + z(i, j, d)\n else:\n i -= d\n j -= d\n return 3 * t // 4 + z(i, j, d)\n\n\n<mask token>\nprint(z(i, j, dim))\n",
"step-4": "t_dim_2 = [[1, 2], [3, 4]]\n\n\ndef z(i, j, dim):\n t = dim ** 2\n if dim == 2:\n return t_dim_2[i - 1][j - 1]\n d = dim // 2\n if i <= d:\n if j <= d:\n return z(i, j, d)\n else:\n j -= d\n return t // 4 + z(i, j, d)\n elif j <= d:\n i -= d\n return t // 2 + z(i, j, d)\n else:\n i -= d\n j -= d\n return 3 * t // 4 + z(i, j, d)\n\n\nn = 2\ni = 3\nj = 3\ndim = 2 ** n\nprint(z(i, j, dim))\n",
"step-5": "\nt_dim_2 = [[1, 2], [3, 4]]\n\ndef z(i, j, dim):\n t = dim ** 2\n if dim == 2:\n return t_dim_2[i-1][j-1]\n\n d = dim//2\n if i <= d: # I or II\n if j <= d:\n return z(i, j, d) #I\n else:\n j -= d\n return t//4 + z(i, j, d) # II\n else: # III or IV\n if j <=d:\n i -= d\n return t//2 + z(i, j, d) # III\n else:\n i -= d\n j -= d\n return 3*t//4 + z(i, j, d) # IV\nn = 2\ni = 3\nj = 3\ndim = 2**n\nprint(z(i,j,dim))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from apps.mastermind.core.domain.domain import Color, Game
from apps.mastermind.infrastructure.mongo_persistence.uow import MongoUnitOfWork
from composite_root.container import provide
class GameMother:
async def a_game(
self,
num_slots: int,
num_colors: int,
max_guesses: int,
secret_code: list[Color],
reference: str | None = None,
) -> Game:
async with provide(MongoUnitOfWork) as uow:
game = Game.new(
id=uow.games.next_id(),
num_slots=num_slots,
num_colors=num_colors,
max_guesses=max_guesses,
)
game.secret_code = secret_code
if reference:
game.reference = reference
await uow.games.asave(game)
await uow.commit()
return game
|
normal
|
{
"blob_id": "8457cdde8f8ad069505c7729b8217e5d272be41e",
"index": 957,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass GameMother:\n\n async def a_game(self, num_slots: int, num_colors: int, max_guesses:\n int, secret_code: list[Color], reference: (str | None)=None) ->Game:\n async with provide(MongoUnitOfWork) as uow:\n game = Game.new(id=uow.games.next_id(), num_slots=num_slots,\n num_colors=num_colors, max_guesses=max_guesses)\n game.secret_code = secret_code\n if reference:\n game.reference = reference\n await uow.games.asave(game)\n await uow.commit()\n return game\n",
"step-3": "from apps.mastermind.core.domain.domain import Color, Game\nfrom apps.mastermind.infrastructure.mongo_persistence.uow import MongoUnitOfWork\nfrom composite_root.container import provide\n\n\nclass GameMother:\n\n async def a_game(self, num_slots: int, num_colors: int, max_guesses:\n int, secret_code: list[Color], reference: (str | None)=None) ->Game:\n async with provide(MongoUnitOfWork) as uow:\n game = Game.new(id=uow.games.next_id(), num_slots=num_slots,\n num_colors=num_colors, max_guesses=max_guesses)\n game.secret_code = secret_code\n if reference:\n game.reference = reference\n await uow.games.asave(game)\n await uow.commit()\n return game\n",
"step-4": "from apps.mastermind.core.domain.domain import Color, Game\nfrom apps.mastermind.infrastructure.mongo_persistence.uow import MongoUnitOfWork\nfrom composite_root.container import provide\n\n\nclass GameMother:\n async def a_game(\n self,\n num_slots: int,\n num_colors: int,\n max_guesses: int,\n secret_code: list[Color],\n reference: str | None = None,\n ) -> Game:\n async with provide(MongoUnitOfWork) as uow:\n game = Game.new(\n id=uow.games.next_id(),\n num_slots=num_slots,\n num_colors=num_colors,\n max_guesses=max_guesses,\n )\n game.secret_code = secret_code\n\n if reference:\n game.reference = reference\n\n await uow.games.asave(game)\n await uow.commit()\n return game\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
CHROME_WEBDRIVER = 'c:/users/username/project/chromedriver.exe'
WEBSITE_PDF_CONVERTER = 'https://www.ilovepdf.com/merge_pdf'
PDF_FILES = 'c:/users/username/project'
<|reserved_special_token_1|>
"""
If you are using MultiScript Editor make sure to set PYTHONPATH to Winexs' editor.
You can use set PYTHONPATH=c:/users/username/myscripts
Set paths according to your project!
"""
CHROME_WEBDRIVER = 'c:/users/username/project/chromedriver.exe'
WEBSITE_PDF_CONVERTER = 'https://www.ilovepdf.com/merge_pdf'
PDF_FILES = 'c:/users/username/project'
|
flexible
|
{
"blob_id": "0fdbdfe98496ebedb112c85b79836292ffa3a5a9",
"index": 9076,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nCHROME_WEBDRIVER = 'c:/users/username/project/chromedriver.exe'\nWEBSITE_PDF_CONVERTER = 'https://www.ilovepdf.com/merge_pdf'\nPDF_FILES = 'c:/users/username/project'\n",
"step-3": "\"\"\"\nIf you are using MultiScript Editor make sure to set PYTHONPATH to Winexs' editor.\nYou can use set PYTHONPATH=c:/users/username/myscripts\n\nSet paths according to your project!\n\"\"\"\n\nCHROME_WEBDRIVER = 'c:/users/username/project/chromedriver.exe'\nWEBSITE_PDF_CONVERTER = 'https://www.ilovepdf.com/merge_pdf'\nPDF_FILES = 'c:/users/username/project'",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import cv2 as cv
import numpy as np
img = np.zeros((512, 512, 3), np.uint8)
cv.line(img, (0, 0), (511, 511), (255, 255, 255), 10)
cv.rectangle(img, (384, 0), (510, 128), (255, 0, 0), 3)
cv.circle(img, (200, 60), 20, (0, 100, 255), 3)
cv.ellipse(img, (250, 250), (100, 50), 90, 0, 180, (255, 0, 255), 3)
font = cv.FONT_HERSHEY_SIMPLEX
cv.putText(img, 'OpenCV', (10, 500), font, 4, (255, 0, 255), 3)
cv.imshow('d1', img)
cv.waitKey(0)
|
normal
|
{
"blob_id": "08c5f5ac568b7575d8082976336a5893951b53c2",
"index": 9269,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncv.line(img, (0, 0), (511, 511), (255, 255, 255), 10)\ncv.rectangle(img, (384, 0), (510, 128), (255, 0, 0), 3)\ncv.circle(img, (200, 60), 20, (0, 100, 255), 3)\ncv.ellipse(img, (250, 250), (100, 50), 90, 0, 180, (255, 0, 255), 3)\n<mask token>\ncv.putText(img, 'OpenCV', (10, 500), font, 4, (255, 0, 255), 3)\ncv.imshow('d1', img)\ncv.waitKey(0)\n",
"step-3": "<mask token>\nimg = np.zeros((512, 512, 3), np.uint8)\ncv.line(img, (0, 0), (511, 511), (255, 255, 255), 10)\ncv.rectangle(img, (384, 0), (510, 128), (255, 0, 0), 3)\ncv.circle(img, (200, 60), 20, (0, 100, 255), 3)\ncv.ellipse(img, (250, 250), (100, 50), 90, 0, 180, (255, 0, 255), 3)\nfont = cv.FONT_HERSHEY_SIMPLEX\ncv.putText(img, 'OpenCV', (10, 500), font, 4, (255, 0, 255), 3)\ncv.imshow('d1', img)\ncv.waitKey(0)\n",
"step-4": "import cv2 as cv\nimport numpy as np\nimg = np.zeros((512, 512, 3), np.uint8)\ncv.line(img, (0, 0), (511, 511), (255, 255, 255), 10)\ncv.rectangle(img, (384, 0), (510, 128), (255, 0, 0), 3)\ncv.circle(img, (200, 60), 20, (0, 100, 255), 3)\ncv.ellipse(img, (250, 250), (100, 50), 90, 0, 180, (255, 0, 255), 3)\nfont = cv.FONT_HERSHEY_SIMPLEX\ncv.putText(img, 'OpenCV', (10, 500), font, 4, (255, 0, 255), 3)\ncv.imshow('d1', img)\ncv.waitKey(0)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
class Fail(Exception):
def __init__(self, message):
super().__init__(message)
class Student:
def __init__(self, rollNo, name, marks):
self.rollNo = rollNo
self.name = name
self.marks = marks
def displayDetails(self):
print('{} \t {} \t {}'.format(self.name, self.rollNo, self.marks))
try:
if self.marks < 40:
raise Fail('Student {} has Scored {} marks and has Failed '
.format(self.name, self.marks))
except Fail as f:
print(f)
myStudentList = []
num = int(input('Enter the number of Students : '))
for i in range(num):
rollNo, name, marks = input('Enter Roll-no,Name,Marks of Student {} : '
.format(i + 1)).split(',')
print('----------------------------------------')
marks = int(marks)
myStudentList.append(Student(rollNo, name, marks))
print('DETAILS OF STUDENTS ARE : ')
for i in range(num):
myStudentList[i].displayDetails()
|
normal
|
{
"blob_id": "ddf074e400551d2c147d898fe876a31d13a72699",
"index": 5324,
"step-1": "<mask token>\n\n\nclass Student:\n <mask token>\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\n<mask token>\n",
"step-2": "class Fail(Exception):\n\n def __init__(self, message):\n super().__init__(message)\n\n\nclass Student:\n\n def __init__(self, rollNo, name, marks):\n self.rollNo = rollNo\n self.name = name\n self.marks = marks\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\n<mask token>\n",
"step-3": "class Fail(Exception):\n\n def __init__(self, message):\n super().__init__(message)\n\n\nclass Student:\n\n def __init__(self, rollNo, name, marks):\n self.rollNo = rollNo\n self.name = name\n self.marks = marks\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\n<mask token>\nfor i in range(num):\n rollNo, name, marks = input('Enter Roll-no,Name,Marks of Student {} : '\n .format(i + 1)).split(',')\n print('----------------------------------------')\n marks = int(marks)\n myStudentList.append(Student(rollNo, name, marks))\nprint('DETAILS OF STUDENTS ARE : ')\nfor i in range(num):\n myStudentList[i].displayDetails()\n",
"step-4": "class Fail(Exception):\n\n def __init__(self, message):\n super().__init__(message)\n\n\nclass Student:\n\n def __init__(self, rollNo, name, marks):\n self.rollNo = rollNo\n self.name = name\n self.marks = marks\n\n def displayDetails(self):\n print('{} \\t {} \\t {}'.format(self.name, self.rollNo, self.marks))\n try:\n if self.marks < 40:\n raise Fail('Student {} has Scored {} marks and has Failed '\n .format(self.name, self.marks))\n except Fail as f:\n print(f)\n\n\nmyStudentList = []\nnum = int(input('Enter the number of Students : '))\nfor i in range(num):\n rollNo, name, marks = input('Enter Roll-no,Name,Marks of Student {} : '\n .format(i + 1)).split(',')\n print('----------------------------------------')\n marks = int(marks)\n myStudentList.append(Student(rollNo, name, marks))\nprint('DETAILS OF STUDENTS ARE : ')\nfor i in range(num):\n myStudentList[i].displayDetails()\n",
"step-5": null,
"step-ids": [
2,
5,
6,
7
]
}
|
[
2,
5,
6,
7
] |
#
# o o
# 8
# .oPYo. .oPYo. odYo. o8P o8 .oPYo. odYo. .oPYo. .oPYo.
# Yb.. 8oooo8 8' `8 8 8 8oooo8 8' `8 8 ' 8oooo8
# 'Yb. 8. 8 8 8 8 8. 8 8 8 . 8.
# `YooP' `Yooo' 8 8 8 8 `Yooo' 8 8 `YooP' `Yooo'
# :.....::.....:..::..::..::..:.....:..::..:.....::.....:
# :::::::::::::::::::::::::::::::::::::::::::::::::::::::
# :::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
# Copyright Yazan Obeidi, 2017
#
# python.learning.learn - single interface for learning
#
from src.python.utils.log import init_log
from src.python.utils.config import init_config
from src.python.learning.models import Model
__author__ = 'yazan'
__version__ = '0.0.1'
__licence__ = 'Apache V2'
class Trainer(object):
"""Consumes data/dataset in streamable or batch format
and trains a single model in the available catalogue.
"""
def __init__(self, log, config, model_handle, model_schema):
""":params:
model_handle: a model object, i.e. a RandomForest clf handler
model_schema: reference to the library for that model, i.e. sklearn
"""
self.log = log
self.config = config
self.model = model_handle
self.schema = model_schema
def train(self):
pass
@property
def score(self):
pass
if __name__ = '__main__':
log = init_log()
config = init_config()
trainer = Trainer(log=log, config=config)
|
normal
|
{
"blob_id": "c6357e6e0656388fc3fd849879aa6000e0bee1ee",
"index": 1553,
"step-1": "#\n# o o \n# 8 \n# .oPYo. .oPYo. odYo. o8P o8 .oPYo. odYo. .oPYo. .oPYo. \n# Yb.. 8oooo8 8' `8 8 8 8oooo8 8' `8 8 ' 8oooo8 \n# 'Yb. 8. 8 8 8 8 8. 8 8 8 . 8. \n# `YooP' `Yooo' 8 8 8 8 `Yooo' 8 8 `YooP' `Yooo' \n# :.....::.....:..::..::..::..:.....:..::..:.....::.....:\n# :::::::::::::::::::::::::::::::::::::::::::::::::::::::\n# :::::::::::::::::::::::::::::::::::::::::::::::::::::::\n#\n# Copyright Yazan Obeidi, 2017\n#\n# python.learning.learn - single interface for learning\n#\n\nfrom src.python.utils.log import init_log\nfrom src.python.utils.config import init_config\nfrom src.python.learning.models import Model\n\n__author__ = 'yazan'\n__version__ = '0.0.1'\n__licence__ = 'Apache V2'\n\nclass Trainer(object):\n \"\"\"Consumes data/dataset in streamable or batch format\n and trains a single model in the available catalogue.\n \"\"\"\n def __init__(self, log, config, model_handle, model_schema):\n \"\"\":params:\n model_handle: a model object, i.e. a RandomForest clf handler\n model_schema: reference to the library for that model, i.e. sklearn\n \"\"\"\n self.log = log\n self.config = config\n self.model = model_handle\n self.schema = model_schema\n\n def train(self):\n pass\n\n @property\n def score(self):\n pass\n\n\nif __name__ = '__main__':\n log = init_log()\n config = init_config()\n trainer = Trainer(log=log, config=config)",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from sklearn import datasets, svm
import matplotlib.pyplot as plt
digits = datasets.load_digits()
X, y = digits.data[:-1], digits.target[:-1]
clf = svm.SVC(gamma=0.1, C=100)
clf.fit(X, y)
prediction = clf.predict(digits.data[-1:])
actual = digits.target[-1:]
print("prediction = " + str(prediction) + ", actual = " + str(actual))
plt.matshow(digits.images[-1])
plt.show()
|
normal
|
{
"blob_id": "0d98472d1c04bfc52378aa6401a47d96582696a2",
"index": 4046,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nclf.fit(X, y)\n<mask token>\nprint('prediction = ' + str(prediction) + ', actual = ' + str(actual))\nplt.matshow(digits.images[-1])\nplt.show()\n",
"step-3": "<mask token>\ndigits = datasets.load_digits()\nX, y = digits.data[:-1], digits.target[:-1]\nclf = svm.SVC(gamma=0.1, C=100)\nclf.fit(X, y)\nprediction = clf.predict(digits.data[-1:])\nactual = digits.target[-1:]\nprint('prediction = ' + str(prediction) + ', actual = ' + str(actual))\nplt.matshow(digits.images[-1])\nplt.show()\n",
"step-4": "from sklearn import datasets, svm\nimport matplotlib.pyplot as plt\ndigits = datasets.load_digits()\nX, y = digits.data[:-1], digits.target[:-1]\nclf = svm.SVC(gamma=0.1, C=100)\nclf.fit(X, y)\nprediction = clf.predict(digits.data[-1:])\nactual = digits.target[-1:]\nprint('prediction = ' + str(prediction) + ', actual = ' + str(actual))\nplt.matshow(digits.images[-1])\nplt.show()\n",
"step-5": "from sklearn import datasets, svm\nimport matplotlib.pyplot as plt\n\ndigits = datasets.load_digits()\nX, y = digits.data[:-1], digits.target[:-1]\n\nclf = svm.SVC(gamma=0.1, C=100)\nclf.fit(X, y)\n\nprediction = clf.predict(digits.data[-1:])\nactual = digits.target[-1:]\nprint(\"prediction = \" + str(prediction) + \", actual = \" + str(actual))\n\nplt.matshow(digits.images[-1])\nplt.show()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Default imports
from sklearn.feature_selection import SelectFromModel
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
import numpy as np
data = pd.read_csv('data/house_prices_multivariate.csv')
# Your solution code here
def select_from_model(dataframe):
X = dataframe.iloc[:, :-1]
y = dataframe.iloc[:, -1]
np.random.seed(9)
model = RandomForestClassifier()
sfm = SelectFromModel(model)
sfm = sfm.fit(X, y)
feature_idx = sfm.get_support()
feature_name = X.columns[feature_idx]
return list(feature_name)
|
normal
|
{
"blob_id": "d6791c8122129a46631582e7d9339ea08bd2e92b",
"index": 3183,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef select_from_model(dataframe):\n X = dataframe.iloc[:, :-1]\n y = dataframe.iloc[:, -1]\n np.random.seed(9)\n model = RandomForestClassifier()\n sfm = SelectFromModel(model)\n sfm = sfm.fit(X, y)\n feature_idx = sfm.get_support()\n feature_name = X.columns[feature_idx]\n return list(feature_name)\n",
"step-3": "<mask token>\ndata = pd.read_csv('data/house_prices_multivariate.csv')\n\n\ndef select_from_model(dataframe):\n X = dataframe.iloc[:, :-1]\n y = dataframe.iloc[:, -1]\n np.random.seed(9)\n model = RandomForestClassifier()\n sfm = SelectFromModel(model)\n sfm = sfm.fit(X, y)\n feature_idx = sfm.get_support()\n feature_name = X.columns[feature_idx]\n return list(feature_name)\n",
"step-4": "from sklearn.feature_selection import SelectFromModel\nfrom sklearn.ensemble import RandomForestClassifier\nimport pandas as pd\nimport numpy as np\ndata = pd.read_csv('data/house_prices_multivariate.csv')\n\n\ndef select_from_model(dataframe):\n X = dataframe.iloc[:, :-1]\n y = dataframe.iloc[:, -1]\n np.random.seed(9)\n model = RandomForestClassifier()\n sfm = SelectFromModel(model)\n sfm = sfm.fit(X, y)\n feature_idx = sfm.get_support()\n feature_name = X.columns[feature_idx]\n return list(feature_name)\n",
"step-5": "# Default imports\nfrom sklearn.feature_selection import SelectFromModel\nfrom sklearn.ensemble import RandomForestClassifier\nimport pandas as pd\nimport numpy as np\n\ndata = pd.read_csv('data/house_prices_multivariate.csv')\n\n\n# Your solution code here\n\ndef select_from_model(dataframe):\n X = dataframe.iloc[:, :-1]\n y = dataframe.iloc[:, -1]\n np.random.seed(9)\n model = RandomForestClassifier()\n\n sfm = SelectFromModel(model)\n sfm = sfm.fit(X, y)\n\n feature_idx = sfm.get_support()\n feature_name = X.columns[feature_idx]\n\n return list(feature_name)\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
# encoding=utf-8
import MySQLdb
import re
# 打开数据库连接
db = MySQLdb.connect(host='wonderfulloffline.mysql.rds.aliyuncs.com',port=3306,user='wonderfull_ai',password='868wxRHrPaTKkjvC', db='wonderfull_ai_online', charset='utf8' )
def load_stop_word():
stop_word=set()
with open("data/stop_word.txt","r",encoding="utf-8") as file:
for line in file.readlines():
stop_word.add(line.strip())
return stop_word
# 使用cursor()方法获取操作游标
def get_goods_title_dict(stop_word_dict):
cursor = db.cursor()
# 使用execute方法执行SQL语句
cursor.execute("select goods_name FROM goods")
# 使用 fetchone() 方法获取一条数据
data = cursor.fetchall()
goods_name_dict=dict()
idx=1
for line in data:
title = line[0].strip().lower()
for c in title:
if(c.strip()==''):
continue
if(c in stop_word_dict):
continue
if(c not in goods_name_dict):
goods_name_dict[c]=idx
idx=idx+1
cursor.execute("select goods_name FROM goods where is_onsell=1")
data = cursor.fetchall()
regexp = r"[0-9a-z]+"
pattern = re.compile(regexp)
for line in data:
title = line[0].strip().lower()
match_res = pattern.findall(title)
print(title,match_res)
for item in match_res:
if (item not in goods_name_dict):
goods_name_dict[item] = idx
idx = idx + 1
# 关闭数据库连接
# db.close()
return goods_name_dict
def write_dict(word_dict):
file=open("data/vocab_unigram.txt","w",encoding="utf-8")
file.write("[UNK]"+"\t"+"0"+"\n")
for k,v in word_dict.items():
# print(k,v)
file.write(k+"\t"+str(v)+"\n")
file.close()
if __name__ == '__main__':
stop_word_dict=load_stop_word()
goods_name_dict=get_goods_title_dict(stop_word_dict)
# print(goods_name_dict)
write_dict(goods_name_dict)
|
normal
|
{
"blob_id": "4942b20a8e4f58c52b82800fb4c59db169cd8048",
"index": 3562,
"step-1": "<mask token>\n\n\ndef load_stop_word():\n stop_word = set()\n with open('data/stop_word.txt', 'r', encoding='utf-8') as file:\n for line in file.readlines():\n stop_word.add(line.strip())\n return stop_word\n\n\n<mask token>\n\n\ndef write_dict(word_dict):\n file = open('data/vocab_unigram.txt', 'w', encoding='utf-8')\n file.write('[UNK]' + '\\t' + '0' + '\\n')\n for k, v in word_dict.items():\n file.write(k + '\\t' + str(v) + '\\n')\n file.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_stop_word():\n stop_word = set()\n with open('data/stop_word.txt', 'r', encoding='utf-8') as file:\n for line in file.readlines():\n stop_word.add(line.strip())\n return stop_word\n\n\ndef get_goods_title_dict(stop_word_dict):\n cursor = db.cursor()\n cursor.execute('select goods_name FROM goods')\n data = cursor.fetchall()\n goods_name_dict = dict()\n idx = 1\n for line in data:\n title = line[0].strip().lower()\n for c in title:\n if c.strip() == '':\n continue\n if c in stop_word_dict:\n continue\n if c not in goods_name_dict:\n goods_name_dict[c] = idx\n idx = idx + 1\n cursor.execute('select goods_name FROM goods where is_onsell=1')\n data = cursor.fetchall()\n regexp = '[0-9a-z]+'\n pattern = re.compile(regexp)\n for line in data:\n title = line[0].strip().lower()\n match_res = pattern.findall(title)\n print(title, match_res)\n for item in match_res:\n if item not in goods_name_dict:\n goods_name_dict[item] = idx\n idx = idx + 1\n return goods_name_dict\n\n\ndef write_dict(word_dict):\n file = open('data/vocab_unigram.txt', 'w', encoding='utf-8')\n file.write('[UNK]' + '\\t' + '0' + '\\n')\n for k, v in word_dict.items():\n file.write(k + '\\t' + str(v) + '\\n')\n file.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_stop_word():\n stop_word = set()\n with open('data/stop_word.txt', 'r', encoding='utf-8') as file:\n for line in file.readlines():\n stop_word.add(line.strip())\n return stop_word\n\n\ndef get_goods_title_dict(stop_word_dict):\n cursor = db.cursor()\n cursor.execute('select goods_name FROM goods')\n data = cursor.fetchall()\n goods_name_dict = dict()\n idx = 1\n for line in data:\n title = line[0].strip().lower()\n for c in title:\n if c.strip() == '':\n continue\n if c in stop_word_dict:\n continue\n if c not in goods_name_dict:\n goods_name_dict[c] = idx\n idx = idx + 1\n cursor.execute('select goods_name FROM goods where is_onsell=1')\n data = cursor.fetchall()\n regexp = '[0-9a-z]+'\n pattern = re.compile(regexp)\n for line in data:\n title = line[0].strip().lower()\n match_res = pattern.findall(title)\n print(title, match_res)\n for item in match_res:\n if item not in goods_name_dict:\n goods_name_dict[item] = idx\n idx = idx + 1\n return goods_name_dict\n\n\ndef write_dict(word_dict):\n file = open('data/vocab_unigram.txt', 'w', encoding='utf-8')\n file.write('[UNK]' + '\\t' + '0' + '\\n')\n for k, v in word_dict.items():\n file.write(k + '\\t' + str(v) + '\\n')\n file.close()\n\n\nif __name__ == '__main__':\n stop_word_dict = load_stop_word()\n goods_name_dict = get_goods_title_dict(stop_word_dict)\n write_dict(goods_name_dict)\n",
"step-4": "<mask token>\ndb = MySQLdb.connect(host='wonderfulloffline.mysql.rds.aliyuncs.com', port=\n 3306, user='wonderfull_ai', password='868wxRHrPaTKkjvC', db=\n 'wonderfull_ai_online', charset='utf8')\n\n\ndef load_stop_word():\n stop_word = set()\n with open('data/stop_word.txt', 'r', encoding='utf-8') as file:\n for line in file.readlines():\n stop_word.add(line.strip())\n return stop_word\n\n\ndef get_goods_title_dict(stop_word_dict):\n cursor = db.cursor()\n cursor.execute('select goods_name FROM goods')\n data = cursor.fetchall()\n goods_name_dict = dict()\n idx = 1\n for line in data:\n title = line[0].strip().lower()\n for c in title:\n if c.strip() == '':\n continue\n if c in stop_word_dict:\n continue\n if c not in goods_name_dict:\n goods_name_dict[c] = idx\n idx = idx + 1\n cursor.execute('select goods_name FROM goods where is_onsell=1')\n data = cursor.fetchall()\n regexp = '[0-9a-z]+'\n pattern = re.compile(regexp)\n for line in data:\n title = line[0].strip().lower()\n match_res = pattern.findall(title)\n print(title, match_res)\n for item in match_res:\n if item not in goods_name_dict:\n goods_name_dict[item] = idx\n idx = idx + 1\n return goods_name_dict\n\n\ndef write_dict(word_dict):\n file = open('data/vocab_unigram.txt', 'w', encoding='utf-8')\n file.write('[UNK]' + '\\t' + '0' + '\\n')\n for k, v in word_dict.items():\n file.write(k + '\\t' + str(v) + '\\n')\n file.close()\n\n\nif __name__ == '__main__':\n stop_word_dict = load_stop_word()\n goods_name_dict = get_goods_title_dict(stop_word_dict)\n write_dict(goods_name_dict)\n",
"step-5": "#!/usr/bin/env python\r\n# encoding=utf-8\r\nimport MySQLdb\r\nimport re\r\n\r\n# 打开数据库连接\r\ndb = MySQLdb.connect(host='wonderfulloffline.mysql.rds.aliyuncs.com',port=3306,user='wonderfull_ai',password='868wxRHrPaTKkjvC', db='wonderfull_ai_online', charset='utf8' )\r\n\r\ndef load_stop_word():\r\n stop_word=set()\r\n with open(\"data/stop_word.txt\",\"r\",encoding=\"utf-8\") as file:\r\n for line in file.readlines():\r\n stop_word.add(line.strip())\r\n return stop_word\r\n\r\n# 使用cursor()方法获取操作游标\r\ndef get_goods_title_dict(stop_word_dict):\r\n cursor = db.cursor()\r\n # 使用execute方法执行SQL语句\r\n cursor.execute(\"select goods_name FROM goods\")\r\n # 使用 fetchone() 方法获取一条数据\r\n data = cursor.fetchall()\r\n goods_name_dict=dict()\r\n idx=1\r\n\r\n for line in data:\r\n title = line[0].strip().lower()\r\n for c in title:\r\n if(c.strip()==''):\r\n continue\r\n if(c in stop_word_dict):\r\n continue\r\n if(c not in goods_name_dict):\r\n goods_name_dict[c]=idx\r\n idx=idx+1\r\n\r\n cursor.execute(\"select goods_name FROM goods where is_onsell=1\")\r\n data = cursor.fetchall()\r\n regexp = r\"[0-9a-z]+\"\r\n pattern = re.compile(regexp)\r\n for line in data:\r\n title = line[0].strip().lower()\r\n match_res = pattern.findall(title)\r\n print(title,match_res)\r\n for item in match_res:\r\n if (item not in goods_name_dict):\r\n goods_name_dict[item] = idx\r\n idx = idx + 1\r\n\r\n # 关闭数据库连接\r\n # db.close()\r\n return goods_name_dict\r\n\r\ndef write_dict(word_dict):\r\n file=open(\"data/vocab_unigram.txt\",\"w\",encoding=\"utf-8\")\r\n file.write(\"[UNK]\"+\"\\t\"+\"0\"+\"\\n\")\r\n for k,v in word_dict.items():\r\n # print(k,v)\r\n file.write(k+\"\\t\"+str(v)+\"\\n\")\r\n file.close()\r\n\r\nif __name__ == '__main__':\r\n stop_word_dict=load_stop_word()\r\n goods_name_dict=get_goods_title_dict(stop_word_dict)\r\n # print(goods_name_dict)\r\n write_dict(goods_name_dict)",
"step-ids": [
2,
3,
4,
5,
7
]
}
|
[
2,
3,
4,
5,
7
] |
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name = 'index'),
path('about/', views.about, name='about'),
path('contact/', views.contact, name= 'contact'),
path('category/', views.category, name='category'),
path('product/<str:id>/<slug:slug>',views.product_list, name='product_list'),
path('product-detail/<str:id>/<slug:slug>', views.prod_detail, name= 'prod_detail'),
]
|
normal
|
{
"blob_id": "0588aad1536a81d047a2a2b91f83fdde4d1be974",
"index": 3869,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('', views.index, name='index'), path('about/', views.\n about, name='about'), path('contact/', views.contact, name='contact'),\n path('category/', views.category, name='category'), path(\n 'product/<str:id>/<slug:slug>', views.product_list, name='product_list'\n ), path('product-detail/<str:id>/<slug:slug>', views.prod_detail, name=\n 'prod_detail')]\n",
"step-3": "from django.urls import path\nfrom . import views\nurlpatterns = [path('', views.index, name='index'), path('about/', views.\n about, name='about'), path('contact/', views.contact, name='contact'),\n path('category/', views.category, name='category'), path(\n 'product/<str:id>/<slug:slug>', views.product_list, name='product_list'\n ), path('product-detail/<str:id>/<slug:slug>', views.prod_detail, name=\n 'prod_detail')]\n",
"step-4": "from django.urls import path\nfrom . import views\n\n\nurlpatterns = [\n path('', views.index, name = 'index'),\n path('about/', views.about, name='about'),\n path('contact/', views.contact, name= 'contact'),\n path('category/', views.category, name='category'),\n path('product/<str:id>/<slug:slug>',views.product_list, name='product_list'),\n path('product-detail/<str:id>/<slug:slug>', views.prod_detail, name= 'prod_detail'),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Ui_MainWindow(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName('MainWindow')
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName('centralwidget')
MainWindow.setCentralWidget(self.centralwidget)
MainWindow.setWindowIcon(QIcon('data/nn.png'))
MainWindow.resize(800, 800)
self.OK = QtWidgets.QPushButton(self.centralwidget)
self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')
self.OK.setIcon(QIcon('data/ok.png'))
self.OK.setIconSize(QSize(40, 40))
self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))
font = QtGui.QFont()
font.setPointSize(10)
self.OK.setFont(font)
self.OK.setAutoFillBackground(True)
self.OK.setObjectName('OK')
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName('MainWindow')
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName('centralwidget')
MainWindow.setCentralWidget(self.centralwidget)
MainWindow.setWindowIcon(QIcon('data/nn.png'))
MainWindow.resize(800, 800)
self.OK = QtWidgets.QPushButton(self.centralwidget)
self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')
self.OK.setIcon(QIcon('data/ok.png'))
self.OK.setIconSize(QSize(40, 40))
self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))
font = QtGui.QFont()
font.setPointSize(10)
self.OK.setFont(font)
self.OK.setAutoFillBackground(True)
self.OK.setObjectName('OK')
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate('MainWindow', 'Drawing digits'))
self.OK.setText(_translate('MainWindow', ' OK'))
<|reserved_special_token_1|>
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import *
from PyQt5.QtCore import *
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName('MainWindow')
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName('centralwidget')
MainWindow.setCentralWidget(self.centralwidget)
MainWindow.setWindowIcon(QIcon('data/nn.png'))
MainWindow.resize(800, 800)
self.OK = QtWidgets.QPushButton(self.centralwidget)
self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')
self.OK.setIcon(QIcon('data/ok.png'))
self.OK.setIconSize(QSize(40, 40))
self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))
font = QtGui.QFont()
font.setPointSize(10)
self.OK.setFont(font)
self.OK.setAutoFillBackground(True)
self.OK.setObjectName('OK')
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate('MainWindow', 'Drawing digits'))
self.OK.setText(_translate('MainWindow', ' OK'))
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import *
from PyQt5.QtCore import *
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
MainWindow.setCentralWidget(self.centralwidget)
MainWindow.setWindowIcon(QIcon('data/nn.png'))
MainWindow.resize(800, 800)
self.OK = QtWidgets.QPushButton(self.centralwidget)
self.OK.setStyleSheet("background-color:#18BDFF; border-radius: 5px;");
self.OK.setIcon(QIcon("data/ok.png"))
self.OK.setIconSize(QSize(40, 40))
self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))
font = QtGui.QFont()
font.setPointSize(10)
self.OK.setFont(font)
self.OK.setAutoFillBackground(True)
self.OK.setObjectName("OK")
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Drawing digits"))
self.OK.setText(_translate("MainWindow", " OK"))
|
flexible
|
{
"blob_id": "65264f52f641b67c707b6a827ecfe1bf417748e8",
"index": 2379,
"step-1": "<mask token>\n\n\nclass Ui_MainWindow(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Ui_MainWindow(object):\n\n def setupUi(self, MainWindow):\n MainWindow.setObjectName('MainWindow')\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName('centralwidget')\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')\n self.OK.setIcon(QIcon('data/ok.png'))\n self.OK.setIconSize(QSize(40, 40))\n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName('OK')\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Ui_MainWindow(object):\n\n def setupUi(self, MainWindow):\n MainWindow.setObjectName('MainWindow')\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName('centralwidget')\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')\n self.OK.setIcon(QIcon('data/ok.png'))\n self.OK.setIconSize(QSize(40, 40))\n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName('OK')\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate('MainWindow', 'Drawing digits'))\n self.OK.setText(_translate('MainWindow', ' OK'))\n",
"step-4": "from PyQt5 import QtCore, QtGui, QtWidgets\nfrom PyQt5.QtGui import *\nfrom PyQt5.QtCore import *\n\n\nclass Ui_MainWindow(object):\n\n def setupUi(self, MainWindow):\n MainWindow.setObjectName('MainWindow')\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName('centralwidget')\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet('background-color:#18BDFF; border-radius: 5px;')\n self.OK.setIcon(QIcon('data/ok.png'))\n self.OK.setIconSize(QSize(40, 40))\n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName('OK')\n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n\n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate('MainWindow', 'Drawing digits'))\n self.OK.setText(_translate('MainWindow', ' OK'))\n",
"step-5": "# -*- coding: utf-8 -*-\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\nfrom PyQt5.QtGui import *\nfrom PyQt5.QtCore import *\n\nclass Ui_MainWindow(object):\n def setupUi(self, MainWindow):\n MainWindow.setObjectName(\"MainWindow\")\n self.centralwidget = QtWidgets.QWidget(MainWindow)\n self.centralwidget.setObjectName(\"centralwidget\")\n MainWindow.setCentralWidget(self.centralwidget)\n MainWindow.setWindowIcon(QIcon('data/nn.png'))\n MainWindow.resize(800, 800)\n \n \n self.OK = QtWidgets.QPushButton(self.centralwidget)\n self.OK.setStyleSheet(\"background-color:#18BDFF; border-radius: 5px;\");\n self.OK.setIcon(QIcon(\"data/ok.png\"))\n self.OK.setIconSize(QSize(40, 40)) \n self.OK.setGeometry(QtCore.QRect(375, 820, 150, 45))\n font = QtGui.QFont()\n font.setPointSize(10)\n self.OK.setFont(font)\n self.OK.setAutoFillBackground(True)\n self.OK.setObjectName(\"OK\")\n \n \n self.retranslateUi(MainWindow)\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\n \n \n \n def retranslateUi(self, MainWindow):\n _translate = QtCore.QCoreApplication.translate\n MainWindow.setWindowTitle(_translate(\"MainWindow\", \"Drawing digits\"))\n self.OK.setText(_translate(\"MainWindow\", \" OK\"))\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def preprocess_image(img):
img = img.astype(np.uint8)
channel_b, channel_g, channel_r = cv2.split(img)
result = ndimage.maximum_filter(channel_g, size=5)
ret, result = cv2.threshold(channel_g, 120, 255, cv2.THRESH_BINARY_INV)
clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))
clahe_g = clahe.apply(channel_g)
image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))
image[:, :, 0] = channel_g
image[:, :, 1] = clahe_g
image[:, :, 2] = result
image = image.astype(np.uint8)
image = img_to_array(image)
return image
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def preprocess_image(img):
img = img.astype(np.uint8)
channel_b, channel_g, channel_r = cv2.split(img)
result = ndimage.maximum_filter(channel_g, size=5)
ret, result = cv2.threshold(channel_g, 120, 255, cv2.THRESH_BINARY_INV)
clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))
clahe_g = clahe.apply(channel_g)
image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))
image[:, :, 0] = channel_g
image[:, :, 1] = clahe_g
image[:, :, 2] = result
image = image.astype(np.uint8)
image = img_to_array(image)
return image
def preprocess_mask(img):
img = img.astype(np.uint8)
return img[:, :, 0].reshape((256, 256, 1))
<|reserved_special_token_1|>
import os
import random
import cv2
import numpy as np
from keras.preprocessing.image import img_to_array
import numpy as np
import keras
from scipy import ndimage, misc
def preprocess_image(img):
img = img.astype(np.uint8)
channel_b, channel_g, channel_r = cv2.split(img)
result = ndimage.maximum_filter(channel_g, size=5)
ret, result = cv2.threshold(channel_g, 120, 255, cv2.THRESH_BINARY_INV)
clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))
clahe_g = clahe.apply(channel_g)
image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))
image[:, :, 0] = channel_g
image[:, :, 1] = clahe_g
image[:, :, 2] = result
image = image.astype(np.uint8)
image = img_to_array(image)
return image
def preprocess_mask(img):
img = img.astype(np.uint8)
return img[:, :, 0].reshape((256, 256, 1))
<|reserved_special_token_1|>
import os
import random
import cv2
import numpy as np
from keras.preprocessing.image import img_to_array
import numpy as np
import keras
from scipy import ndimage, misc
def preprocess_image(img):
img = img.astype(np.uint8)
(channel_b, channel_g, channel_r) = cv2.split(img)
result = ndimage.maximum_filter(channel_g, size=5)
# ret3,result = cv2.threshold(result,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
ret,result = cv2.threshold(channel_g,120,255,cv2.THRESH_BINARY_INV)
clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))
clahe_g = clahe.apply(channel_g)
image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))
image[:,:,0] = channel_g
image[:,:,1] = clahe_g
image[:,:,2] = result
image = image.astype(np.uint8)
image = img_to_array(image)
return image
def preprocess_mask(img):
img = img.astype(np.uint8)
return img[:,:,0].reshape((256,256,1))
# img=cv2.imread("/home/team6/Project/MiMM_SBILab/patches/train/images/0/1015.jpg")
# img_result=preprocess_image(img)
# cv2.imwrite("preprocess.jpg",img_result)
|
flexible
|
{
"blob_id": "586d39556d2922a288a2bef3bcffbc6f9e3dc39d",
"index": 6707,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef preprocess_image(img):\n img = img.astype(np.uint8)\n channel_b, channel_g, channel_r = cv2.split(img)\n result = ndimage.maximum_filter(channel_g, size=5)\n ret, result = cv2.threshold(channel_g, 120, 255, cv2.THRESH_BINARY_INV)\n clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))\n clahe_g = clahe.apply(channel_g)\n image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))\n image[:, :, 0] = channel_g\n image[:, :, 1] = clahe_g\n image[:, :, 2] = result\n image = image.astype(np.uint8)\n image = img_to_array(image)\n return image\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef preprocess_image(img):\n img = img.astype(np.uint8)\n channel_b, channel_g, channel_r = cv2.split(img)\n result = ndimage.maximum_filter(channel_g, size=5)\n ret, result = cv2.threshold(channel_g, 120, 255, cv2.THRESH_BINARY_INV)\n clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))\n clahe_g = clahe.apply(channel_g)\n image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))\n image[:, :, 0] = channel_g\n image[:, :, 1] = clahe_g\n image[:, :, 2] = result\n image = image.astype(np.uint8)\n image = img_to_array(image)\n return image\n\n\ndef preprocess_mask(img):\n img = img.astype(np.uint8)\n return img[:, :, 0].reshape((256, 256, 1))\n",
"step-4": "import os\nimport random\nimport cv2\nimport numpy as np\nfrom keras.preprocessing.image import img_to_array\nimport numpy as np\nimport keras\nfrom scipy import ndimage, misc\n\n\ndef preprocess_image(img):\n img = img.astype(np.uint8)\n channel_b, channel_g, channel_r = cv2.split(img)\n result = ndimage.maximum_filter(channel_g, size=5)\n ret, result = cv2.threshold(channel_g, 120, 255, cv2.THRESH_BINARY_INV)\n clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))\n clahe_g = clahe.apply(channel_g)\n image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))\n image[:, :, 0] = channel_g\n image[:, :, 1] = clahe_g\n image[:, :, 2] = result\n image = image.astype(np.uint8)\n image = img_to_array(image)\n return image\n\n\ndef preprocess_mask(img):\n img = img.astype(np.uint8)\n return img[:, :, 0].reshape((256, 256, 1))\n",
"step-5": "import os\nimport random\nimport cv2\nimport numpy as np\nfrom keras.preprocessing.image import img_to_array\nimport numpy as np\nimport keras\nfrom scipy import ndimage, misc\n\ndef preprocess_image(img):\n img = img.astype(np.uint8)\n (channel_b, channel_g, channel_r) = cv2.split(img)\n\n result = ndimage.maximum_filter(channel_g, size=5)\n # ret3,result = cv2.threshold(result,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)\n ret,result = cv2.threshold(channel_g,120,255,cv2.THRESH_BINARY_INV)\n\n clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(11, 11))\n clahe_g = clahe.apply(channel_g)\n\n image = np.zeros((img.shape[0], img.shape[1], img.shape[2]))\n\n image[:,:,0] = channel_g\n image[:,:,1] = clahe_g\n image[:,:,2] = result\n\n image = image.astype(np.uint8)\n\n image = img_to_array(image)\n\n return image\n\ndef preprocess_mask(img):\n img = img.astype(np.uint8)\n return img[:,:,0].reshape((256,256,1))\n\n\n# img=cv2.imread(\"/home/team6/Project/MiMM_SBILab/patches/train/images/0/1015.jpg\")\n# img_result=preprocess_image(img)\n# cv2.imwrite(\"preprocess.jpg\",img_result)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('juchu', '0003_auto_20190718_1500')]
operations = [migrations.RemoveField(model_name='order', name='product'
), migrations.RemoveField(model_name='order', name='quantity'),
migrations.CreateModel(name='OrderProduct', fields=[('id', models.
AutoField(auto_created=True, primary_key=True, serialize=False,
verbose_name='ID')), ('quantity', models.IntegerField(default=1)),
('order', models.ForeignKey(on_delete=django.db.models.deletion.
PROTECT, to='juchu.Order')), ('product', models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to='juchu.Product'))])]
<|reserved_special_token_1|>
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [('juchu', '0003_auto_20190718_1500')]
operations = [migrations.RemoveField(model_name='order', name='product'
), migrations.RemoveField(model_name='order', name='quantity'),
migrations.CreateModel(name='OrderProduct', fields=[('id', models.
AutoField(auto_created=True, primary_key=True, serialize=False,
verbose_name='ID')), ('quantity', models.IntegerField(default=1)),
('order', models.ForeignKey(on_delete=django.db.models.deletion.
PROTECT, to='juchu.Order')), ('product', models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to='juchu.Product'))])]
<|reserved_special_token_1|>
# Generated by Django 2.2.3 on 2019-07-18 06:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('juchu', '0003_auto_20190718_1500'),
]
operations = [
migrations.RemoveField(
model_name='order',
name='product',
),
migrations.RemoveField(
model_name='order',
name='quantity',
),
migrations.CreateModel(
name='OrderProduct',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField(default=1)),
('order', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='juchu.Order')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='juchu.Product')),
],
),
]
|
flexible
|
{
"blob_id": "b0174b6f6c33434ff9b5cdb59531502899d8348a",
"index": 4262,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('juchu', '0003_auto_20190718_1500')]\n operations = [migrations.RemoveField(model_name='order', name='product'\n ), migrations.RemoveField(model_name='order', name='quantity'),\n migrations.CreateModel(name='OrderProduct', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('quantity', models.IntegerField(default=1)),\n ('order', models.ForeignKey(on_delete=django.db.models.deletion.\n PROTECT, to='juchu.Order')), ('product', models.ForeignKey(\n on_delete=django.db.models.deletion.PROTECT, to='juchu.Product'))])]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('juchu', '0003_auto_20190718_1500')]\n operations = [migrations.RemoveField(model_name='order', name='product'\n ), migrations.RemoveField(model_name='order', name='quantity'),\n migrations.CreateModel(name='OrderProduct', fields=[('id', models.\n AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('quantity', models.IntegerField(default=1)),\n ('order', models.ForeignKey(on_delete=django.db.models.deletion.\n PROTECT, to='juchu.Order')), ('product', models.ForeignKey(\n on_delete=django.db.models.deletion.PROTECT, to='juchu.Product'))])]\n",
"step-5": "# Generated by Django 2.2.3 on 2019-07-18 06:05\r\n\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('juchu', '0003_auto_20190718_1500'),\r\n ]\r\n\r\n operations = [\r\n migrations.RemoveField(\r\n model_name='order',\r\n name='product',\r\n ),\r\n migrations.RemoveField(\r\n model_name='order',\r\n name='quantity',\r\n ),\r\n migrations.CreateModel(\r\n name='OrderProduct',\r\n fields=[\r\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('quantity', models.IntegerField(default=1)),\r\n ('order', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='juchu.Order')),\r\n ('product', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='juchu.Product')),\r\n ],\r\n ),\r\n ]\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
@dataclass
class Book:
id: int
title: str
author: str
genre: str
published: date
status: str = 'Available'
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@dataclass
class Book:
id: int
title: str
author: str
genre: str
published: date
status: str = 'Available'
<|reserved_special_token_0|>
def get_more_information(self):
return (
f'Gatunek: {self.genre}\nData publikacji: {self.published}\nStatus: {self.status}'
)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@dataclass
class Book:
id: int
title: str
author: str
genre: str
published: date
status: str = 'Available'
def __str__(self):
return f'{self.id}: {self.title} by {self.author}'
def get_more_information(self):
return (
f'Gatunek: {self.genre}\nData publikacji: {self.published}\nStatus: {self.status}'
)
<|reserved_special_token_1|>
from dataclasses import dataclass
from datetime import date
@dataclass
class Book:
id: int
title: str
author: str
genre: str
published: date
status: str = 'Available'
def __str__(self):
return f'{self.id}: {self.title} by {self.author}'
def get_more_information(self):
return (
f'Gatunek: {self.genre}\nData publikacji: {self.published}\nStatus: {self.status}'
)
<|reserved_special_token_1|>
from dataclasses import dataclass
from datetime import date
@dataclass
class Book:
id: int
title: str
author: str
genre: str
published: date
status: str = 'Available'
def __str__(self):
return f'{self.id}: {self.title} by {self.author}'
def get_more_information(self):
return f"Gatunek: {self.genre}\nData publikacji: {self.published}\nStatus: {self.status}"
|
flexible
|
{
"blob_id": "dc13ca17bff8e2a5254c7758bd7274926bafd454",
"index": 5312,
"step-1": "<mask token>\n\n\n@dataclass\nclass Book:\n id: int\n title: str\n author: str\n genre: str\n published: date\n status: str = 'Available'\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\n@dataclass\nclass Book:\n id: int\n title: str\n author: str\n genre: str\n published: date\n status: str = 'Available'\n <mask token>\n\n def get_more_information(self):\n return (\n f'Gatunek: {self.genre}\\nData publikacji: {self.published}\\nStatus: {self.status}'\n )\n",
"step-3": "<mask token>\n\n\n@dataclass\nclass Book:\n id: int\n title: str\n author: str\n genre: str\n published: date\n status: str = 'Available'\n\n def __str__(self):\n return f'{self.id}: {self.title} by {self.author}'\n\n def get_more_information(self):\n return (\n f'Gatunek: {self.genre}\\nData publikacji: {self.published}\\nStatus: {self.status}'\n )\n",
"step-4": "from dataclasses import dataclass\nfrom datetime import date\n\n\n@dataclass\nclass Book:\n id: int\n title: str\n author: str\n genre: str\n published: date\n status: str = 'Available'\n\n def __str__(self):\n return f'{self.id}: {self.title} by {self.author}'\n\n def get_more_information(self):\n return (\n f'Gatunek: {self.genre}\\nData publikacji: {self.published}\\nStatus: {self.status}'\n )\n",
"step-5": "from dataclasses import dataclass\nfrom datetime import date\n\n\n@dataclass\nclass Book:\n id: int\n title: str\n author: str\n genre: str\n published: date\n status: str = 'Available'\n\n def __str__(self):\n return f'{self.id}: {self.title} by {self.author}'\n\n def get_more_information(self):\n return f\"Gatunek: {self.genre}\\nData publikacji: {self.published}\\nStatus: {self.status}\"\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.shortcuts import render_to_response
from mousedb.animal.models import Animal, Strain
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.db import connection
import datetime
@login_required
def todo(request):
eartag_list = Animal.objects.filter(MouseID__isnull=True, Alive=True).order_by('Strain','Background','Rack','Cage')
genotype_list = Animal.objects.filter(Genotype="N.D.", Alive=True).exclude(Strain__Strain="C57BL/6").order_by('Strain','Background','Rack','Cage')
wean = datetime.date.today() - datetime.timedelta(days=30)
wean_list = Animal.objects.filter(Born__gt=wean).filter(Weaned=None,Alive=True).exclude(Strain=2).order_by('Strain','Background','Rack','Cage')
return render_to_response('todo.html', {'eartag_list':eartag_list, 'wean_list':wean_list, 'genotype_list':genotype_list},context_instance=RequestContext(request))
@login_required
def home(request):
cursor = connection.cursor()
cage_list = Animal.objects.values("Cage")
cage_list_current = Animal.objects.filter(Alive=True).values("Cage")
animal_list = Animal.objects.all()
animal_list_current = Animal.objects.filter(Alive=True)
strain_list = Strain.objects.all()
strain_list_current = Strain.objects.filter(animal__Alive=True)
return render_to_response('home.html', {'animal_list':animal_list, 'animal_list_current':animal_list_current, 'strain_list':strain_list, 'strain_list_current':strain_list_current, 'cage_list':cage_list, 'cage_list_current':cage_list_current},context_instance=RequestContext(request))
|
normal
|
{
"blob_id": "89518f43934710ef2e7471a91128e20d2306d6f6",
"index": 9291,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@login_required\ndef todo(request):\n eartag_list = Animal.objects.filter(MouseID__isnull=True, Alive=True\n ).order_by('Strain', 'Background', 'Rack', 'Cage')\n genotype_list = Animal.objects.filter(Genotype='N.D.', Alive=True).exclude(\n Strain__Strain='C57BL/6').order_by('Strain', 'Background', 'Rack',\n 'Cage')\n wean = datetime.date.today() - datetime.timedelta(days=30)\n wean_list = Animal.objects.filter(Born__gt=wean).filter(Weaned=None,\n Alive=True).exclude(Strain=2).order_by('Strain', 'Background',\n 'Rack', 'Cage')\n return render_to_response('todo.html', {'eartag_list': eartag_list,\n 'wean_list': wean_list, 'genotype_list': genotype_list},\n context_instance=RequestContext(request))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@login_required\ndef todo(request):\n eartag_list = Animal.objects.filter(MouseID__isnull=True, Alive=True\n ).order_by('Strain', 'Background', 'Rack', 'Cage')\n genotype_list = Animal.objects.filter(Genotype='N.D.', Alive=True).exclude(\n Strain__Strain='C57BL/6').order_by('Strain', 'Background', 'Rack',\n 'Cage')\n wean = datetime.date.today() - datetime.timedelta(days=30)\n wean_list = Animal.objects.filter(Born__gt=wean).filter(Weaned=None,\n Alive=True).exclude(Strain=2).order_by('Strain', 'Background',\n 'Rack', 'Cage')\n return render_to_response('todo.html', {'eartag_list': eartag_list,\n 'wean_list': wean_list, 'genotype_list': genotype_list},\n context_instance=RequestContext(request))\n\n\n@login_required\ndef home(request):\n cursor = connection.cursor()\n cage_list = Animal.objects.values('Cage')\n cage_list_current = Animal.objects.filter(Alive=True).values('Cage')\n animal_list = Animal.objects.all()\n animal_list_current = Animal.objects.filter(Alive=True)\n strain_list = Strain.objects.all()\n strain_list_current = Strain.objects.filter(animal__Alive=True)\n return render_to_response('home.html', {'animal_list': animal_list,\n 'animal_list_current': animal_list_current, 'strain_list':\n strain_list, 'strain_list_current': strain_list_current,\n 'cage_list': cage_list, 'cage_list_current': cage_list_current},\n context_instance=RequestContext(request))\n",
"step-4": "from django.shortcuts import render_to_response\nfrom mousedb.animal.models import Animal, Strain\nfrom django.contrib.auth.decorators import login_required\nfrom django.template import RequestContext\nfrom django.db import connection\nimport datetime\n\n\n@login_required\ndef todo(request):\n eartag_list = Animal.objects.filter(MouseID__isnull=True, Alive=True\n ).order_by('Strain', 'Background', 'Rack', 'Cage')\n genotype_list = Animal.objects.filter(Genotype='N.D.', Alive=True).exclude(\n Strain__Strain='C57BL/6').order_by('Strain', 'Background', 'Rack',\n 'Cage')\n wean = datetime.date.today() - datetime.timedelta(days=30)\n wean_list = Animal.objects.filter(Born__gt=wean).filter(Weaned=None,\n Alive=True).exclude(Strain=2).order_by('Strain', 'Background',\n 'Rack', 'Cage')\n return render_to_response('todo.html', {'eartag_list': eartag_list,\n 'wean_list': wean_list, 'genotype_list': genotype_list},\n context_instance=RequestContext(request))\n\n\n@login_required\ndef home(request):\n cursor = connection.cursor()\n cage_list = Animal.objects.values('Cage')\n cage_list_current = Animal.objects.filter(Alive=True).values('Cage')\n animal_list = Animal.objects.all()\n animal_list_current = Animal.objects.filter(Alive=True)\n strain_list = Strain.objects.all()\n strain_list_current = Strain.objects.filter(animal__Alive=True)\n return render_to_response('home.html', {'animal_list': animal_list,\n 'animal_list_current': animal_list_current, 'strain_list':\n strain_list, 'strain_list_current': strain_list_current,\n 'cage_list': cage_list, 'cage_list_current': cage_list_current},\n context_instance=RequestContext(request))\n",
"step-5": "from django.shortcuts import render_to_response\nfrom mousedb.animal.models import Animal, Strain\nfrom django.contrib.auth.decorators import login_required\nfrom django.template import RequestContext\nfrom django.db import connection\nimport datetime\n\n@login_required\ndef todo(request):\n\teartag_list = Animal.objects.filter(MouseID__isnull=True, Alive=True).order_by('Strain','Background','Rack','Cage')\n\tgenotype_list = Animal.objects.filter(Genotype=\"N.D.\", Alive=True).exclude(Strain__Strain=\"C57BL/6\").order_by('Strain','Background','Rack','Cage')\n\twean = datetime.date.today() - datetime.timedelta(days=30)\n\twean_list = Animal.objects.filter(Born__gt=wean).filter(Weaned=None,Alive=True).exclude(Strain=2).order_by('Strain','Background','Rack','Cage')\n\treturn render_to_response('todo.html', {'eartag_list':eartag_list, 'wean_list':wean_list, 'genotype_list':genotype_list},context_instance=RequestContext(request))\n\n@login_required\ndef home(request):\n\tcursor = connection.cursor()\n\tcage_list = Animal.objects.values(\"Cage\")\n\tcage_list_current = Animal.objects.filter(Alive=True).values(\"Cage\")\n\tanimal_list = Animal.objects.all()\n\tanimal_list_current = Animal.objects.filter(Alive=True)\n\tstrain_list = Strain.objects.all()\n\tstrain_list_current = Strain.objects.filter(animal__Alive=True)\n\treturn render_to_response('home.html', {'animal_list':animal_list, 'animal_list_current':animal_list_current, 'strain_list':strain_list, 'strain_list_current':strain_list_current, 'cage_list':cage_list, 'cage_list_current':cage_list_current},context_instance=RequestContext(request))\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-07-21 12:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='history',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uname', models.CharField(max_length=50, verbose_name='用户名')),
('uword', models.CharField(max_length=50, verbose_name='单词')),
('time', models.DateTimeField(auto_now=True, verbose_name='查询时间')),
('isban', models.BooleanField(default=False, verbose_name='禁用')),
('isdelete', models.BooleanField(default=False, verbose_name='删除')),
],
),
]
|
normal
|
{
"blob_id": "722739086d2777085fdbfdbddef205aaf025580d",
"index": 4291,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('user', '0001_initial')]\n operations = [migrations.CreateModel(name='history', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('uname', models.CharField(max_length=\n 50, verbose_name='用户名')), ('uword', models.CharField(max_length=50,\n verbose_name='单词')), ('time', models.DateTimeField(auto_now=True,\n verbose_name='查询时间')), ('isban', models.BooleanField(default=False,\n verbose_name='禁用')), ('isdelete', models.BooleanField(default=False,\n verbose_name='删除'))])]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('user', '0001_initial')]\n operations = [migrations.CreateModel(name='history', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('uname', models.CharField(max_length=\n 50, verbose_name='用户名')), ('uword', models.CharField(max_length=50,\n verbose_name='单词')), ('time', models.DateTimeField(auto_now=True,\n verbose_name='查询时间')), ('isban', models.BooleanField(default=False,\n verbose_name='禁用')), ('isdelete', models.BooleanField(default=False,\n verbose_name='删除'))])]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.8 on 2018-07-21 12:51\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('user', '0001_initial'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='history',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('uname', models.CharField(max_length=50, verbose_name='用户名')),\n ('uword', models.CharField(max_length=50, verbose_name='单词')),\n ('time', models.DateTimeField(auto_now=True, verbose_name='查询时间')),\n ('isban', models.BooleanField(default=False, verbose_name='禁用')),\n ('isdelete', models.BooleanField(default=False, verbose_name='删除')),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if ratio <= 2:
print('😊')
else:
print('⚠️')
print('Ratio is', ratio)
<|reserved_special_token_1|>
debt = 100
equity = 50
ratio = debt / equity
if ratio <= 2:
print('😊')
else:
print('⚠️')
print('Ratio is', ratio)
|
flexible
|
{
"blob_id": "40b1fac14aaa81039aec8e80ce1c91bb881cfe78",
"index": 3474,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif ratio <= 2:\n print('😊')\nelse:\n print('⚠️')\nprint('Ratio is', ratio)\n",
"step-3": "debt = 100\nequity = 50\nratio = debt / equity\nif ratio <= 2:\n print('😊')\nelse:\n print('⚠️')\nprint('Ratio is', ratio)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class Message(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Message(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __unicode__(self):
return self.text + ' : ' + str(self.votes) + ' : ' + str(self.
date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote
) + '\n'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Message(models.Model):
text = models.CharField(max_length=200)
votes = models.IntegerField()
date_added = models.DateTimeField(default=datetime.now)
score = models.BigIntegerField()
next_vote = models.IntegerField(default=3600)
def __unicode__(self):
return self.text + ' : ' + str(self.votes) + ' : ' + str(self.
date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote
) + '\n'
<|reserved_special_token_1|>
from django.db import models
from datetime import datetime
class Message(models.Model):
text = models.CharField(max_length=200)
votes = models.IntegerField()
date_added = models.DateTimeField(default=datetime.now)
score = models.BigIntegerField()
next_vote = models.IntegerField(default=3600)
def __unicode__(self):
return self.text + ' : ' + str(self.votes) + ' : ' + str(self.
date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote
) + '\n'
<|reserved_special_token_1|>
from django.db import models
from datetime import datetime
class Message(models.Model):
text = models.CharField(max_length=200)
votes = models.IntegerField()
date_added = models.DateTimeField(default=datetime.now)
score = models.BigIntegerField()
next_vote = models.IntegerField(default=3600) # 86400 seconds in a day
def __unicode__(self):
return self.text + ' : '+ str(self.votes) + ' : '+str(self.date_added) + ' : ' + str(self.score) + ' : '+str(self.next_vote) + '\n'
|
flexible
|
{
"blob_id": "7159b447ed6fcb2005f63c7b7359970defbc9d43",
"index": 1496,
"step-1": "<mask token>\n\n\nclass Message(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Message(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __unicode__(self):\n return self.text + ' : ' + str(self.votes) + ' : ' + str(self.\n date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote\n ) + '\\n'\n",
"step-3": "<mask token>\n\n\nclass Message(models.Model):\n text = models.CharField(max_length=200)\n votes = models.IntegerField()\n date_added = models.DateTimeField(default=datetime.now)\n score = models.BigIntegerField()\n next_vote = models.IntegerField(default=3600)\n\n def __unicode__(self):\n return self.text + ' : ' + str(self.votes) + ' : ' + str(self.\n date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote\n ) + '\\n'\n",
"step-4": "from django.db import models\nfrom datetime import datetime\n\n\nclass Message(models.Model):\n text = models.CharField(max_length=200)\n votes = models.IntegerField()\n date_added = models.DateTimeField(default=datetime.now)\n score = models.BigIntegerField()\n next_vote = models.IntegerField(default=3600)\n\n def __unicode__(self):\n return self.text + ' : ' + str(self.votes) + ' : ' + str(self.\n date_added) + ' : ' + str(self.score) + ' : ' + str(self.next_vote\n ) + '\\n'\n",
"step-5": "from django.db import models\nfrom datetime import datetime\n\nclass Message(models.Model):\n text = models.CharField(max_length=200)\n votes = models.IntegerField()\n date_added = models.DateTimeField(default=datetime.now)\n score = models.BigIntegerField()\n next_vote = models.IntegerField(default=3600) # 86400 seconds in a day\n\n def __unicode__(self):\n return self.text + ' : '+ str(self.votes) + ' : '+str(self.date_added) + ' : ' + str(self.score) + ' : '+str(self.next_vote) + '\\n'\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# -*- coding: UTF-8 -*-
# File Name: ll.py
# Author: Sam
# mail: [email protected]
# Created Time: 2016年03月09日 星期三 19时18分02秒
#########################################################################
#!/usr/bin/env python
def checkmark(marks):
if not isinstance(marks,list):
return 'marks Error'
else:
mark = float(sum(marks))/len(marks)
if mark >= 90:
return 'A'
elif mark >= 80:
return 'B'
elif mark >= 70:
return 'C'
elif mark >= 60:
return 'D'
else:
return 'F'
##l = [100,80,90,90]
#print checkmark(l)
def getfl(thestr):
for i in range(len(thestr)):
print thestr[i]," ",thestr[-i-1]
#getfl("hello")
def mycmp(astr,bstr):
a,b = len(astr),len(bstr)
if a != b:
return False
for i in range(a):
if astr[i] != bstr[i]:
return False
else:
return True
#print mycmp('hellO','hello')
def myrcmp(astr,bstr):
a,b = len(astr),len(bstr)
if a != b:
return False
for i in range(a):
if astr[i] != bstr[-i-1]:
return False
else:
return True
#print myrcmp('ollhh','hello')
def getrstr(thestr):
return thestr + thestr[::-1]
#print getrstr("hello")
def mystrip(thestr):
thestrlen = len(thestr)
begl,endl = 0,0
for i in range(thestrlen):
if thestr[i] == ' ':
begl += 1
else:
break
for i in range(thestrlen):
if thestr[-i - 1] == ' ':
endl += 1
else:
break
return thestr[begl:thestrlen-1-endl]
print mystrip('hello '),'test','test'
print mystrip(' hello ')
print mystrip(' hello')
|
normal
|
{
"blob_id": "f98d6dd9ac4714c24ce070a1a81dc4610d04b97e",
"index": 6017,
"step-1": "# -*- coding: UTF-8 -*- \n# File Name: ll.py\n# Author: Sam\n# mail: [email protected]\n# Created Time: 2016年03月09日 星期三 19时18分02秒\n#########################################################################\n#!/usr/bin/env python\ndef checkmark(marks):\n if not isinstance(marks,list):\n return 'marks Error'\n else:\n mark = float(sum(marks))/len(marks)\n if mark >= 90:\n return 'A'\n elif mark >= 80:\n return 'B'\n elif mark >= 70:\n return 'C'\n elif mark >= 60:\n return 'D'\n else:\n return 'F'\n##l = [100,80,90,90]\n#print checkmark(l)\n\ndef getfl(thestr):\n for i in range(len(thestr)):\n print thestr[i],\" \",thestr[-i-1]\n \n#getfl(\"hello\")\ndef mycmp(astr,bstr):\n a,b = len(astr),len(bstr)\n if a != b:\n return False\n for i in range(a):\n if astr[i] != bstr[i]:\n return False\n else:\n return True\n\n#print mycmp('hellO','hello')\ndef myrcmp(astr,bstr):\n a,b = len(astr),len(bstr)\n if a != b:\n return False\n for i in range(a):\n if astr[i] != bstr[-i-1]:\n return False\n else:\n return True\n\n#print myrcmp('ollhh','hello')\n\ndef getrstr(thestr):\n return thestr + thestr[::-1]\n\n#print getrstr(\"hello\")\n\ndef mystrip(thestr):\n thestrlen = len(thestr)\n begl,endl = 0,0\n for i in range(thestrlen):\n if thestr[i] == ' ':\n begl += 1\n else:\n break\n for i in range(thestrlen):\n if thestr[-i - 1] == ' ':\n endl += 1 \n else:\n break\n return thestr[begl:thestrlen-1-endl]\n\nprint mystrip('hello '),'test','test'\nprint mystrip(' hello ')\nprint mystrip(' hello')\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""
Test /cohort/:id/user/:id
"""
import re
from unittest.mock import patch
from django.urls.base import reverse_lazy
from rest_framework import status
from breathecode.tests.mocks import (
GOOGLE_CLOUD_PATH,
apply_google_cloud_client_mock,
apply_google_cloud_bucket_mock,
apply_google_cloud_blob_mock,
)
from ..mixins import AdmissionsTestCase
class CohortIdUserIdTestSuite(AdmissionsTestCase):
    """Tests for the /cohort/:id/user/:id endpoint (GET, PUT and DELETE).

    Every test patches the Google Cloud storage client/bucket/blob so no
    network calls are made; fixtures come from the AdmissionsTestCase mixin's
    generate_models helper.
    """
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_without_auth(self):
        """An unauthenticated GET is rejected with 401."""
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})
        response = self.client.get(url)
        json = response.json()
        self.assertEqual(
            json, {
                'detail': 'Authentication credentials were not provided.',
                'status_code': status.HTTP_401_UNAUTHORIZED
            })
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_cohort_id(self):
        """An authenticated PUT against a cohort_id that does not exist returns 400."""
        # generate_models is called only for its authenticate side effect here.
        model = self.generate_models(authenticate=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid cohort_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_user_id(self):
        """PUT with an existing cohort but a non-existent user_id returns 400."""
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': 999
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid user_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_id(self):
        """PUT when cohort and user both exist but no CohortUser links them returns 400."""
        # cohort_user=True is NOT passed, so the membership row does not exist
        # (contrast with test_cohort_id_user_id_put_with_id, which succeeds).
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_without_user(self):
        """PUT without an explicitly generated user still returns 400 (no CohortUser).

        user=True is not passed; model.user here is presumably the user created
        as a side effect of authenticate=True — TODO confirm against the
        mixin's generate_models implementation.
        """
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_with_user(self):
        """PUT with cohort and user but no CohortUser relation returns 400.

        NOTE(review): this body is identical to
        test_cohort_id_user_id_put_with_bad_id — likely an accidental
        duplicate; consider consolidating.
        """
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id(self):
        """Happy path: PUT on an existing CohortUser returns 200 and echoes the row."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        # Snapshot the DB row before the request so we can assert it is unchanged.
        model_dict = self.get_cohort_user_dict(1)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'id': model.cohort_user.id,
            'role': model.cohort_user.role,
            'educational_status': model.cohort_user.educational_status,
            'finantial_status': model.cohort_user.finantial_status,
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(self.count_cohort_user(), 1)
        self.assertEqual(self.get_cohort_user_dict(1), model_dict)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):
        """DELETE with a non-existent user_id returns 400."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': 9999
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):
        """DELETE with a non-existent cohort_id returns 400."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': 9999,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id(self):
        """DELETE on an existing CohortUser returns 204 and removes the row."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        # The membership row must be gone after the delete.
        self.assertEqual(self.count_cohort_user(), 0)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_task(self):
        """PUT educational_status=GRADUATED is rejected while the user has a PENDING task."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     profile_academy=True,
                                     cohort_user=True,
                                     task=True,
                                     task_status='PENDING',
                                     task_type='PROJECT')
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {
            'educational_status': 'GRADUATED',
        }
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'status_code': 400,
            'detail': 'User has tasks with status pending the educational status cannot be GRADUATED',
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):
        """PUT educational_status=GRADUATED is rejected when finantial_status is LATE."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {
            'educational_status': 'GRADUATED',
            'finantial_status': 'LATE',
        }
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'status_code': 400,
            'detail': 'Cannot be marked as `GRADUATED` if its financial status is `LATE`',
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
normal
|
{
"blob_id": "937711546271c145d0f0df2981bdd7d1e9297e3a",
"index": 3788,
"step-1": "<mask token>\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n <mask token>\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 
model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': 
model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n <mask token>\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-2": "<mask token>\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n <mask token>\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 999})\n data = 
{}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], 
apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def 
test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 9999})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 9999, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, 
status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-3": "<mask token>\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n \"\"\"Test /cohort/:id/user/:id\"\"\"\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 
'user_id': 999})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], 
apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def 
test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 9999})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 9999, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, 
status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-4": "<mask token>\nimport re\nfrom unittest.mock import patch\nfrom django.urls.base import reverse_lazy\nfrom rest_framework import status\nfrom breathecode.tests.mocks import GOOGLE_CLOUD_PATH, apply_google_cloud_client_mock, apply_google_cloud_bucket_mock, apply_google_cloud_blob_mock\nfrom ..mixins import AdmissionsTestCase\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n \"\"\"Test /cohort/:id/user/:id\"\"\"\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n self.assertEqual(json, {'detail':\n 'Authentication credentials were not provided.', 'status_code':\n status.HTTP_401_UNAUTHORIZED})\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n 
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 999})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 
'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Specified cohort not be found'}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'id': model.cohort_user.id, 'role': model.cohort_user.\n role, 'educational_status': model.cohort_user.\n educational_status, 'finantial_status': model.cohort_user.\n finantial_status}\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n 
self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': 9999})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': 9999, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n 
True, specialty_mode=True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True, task=True,\n task_status='PENDING', task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'User has tasks with status pending the educational status cannot be GRADUATED'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=\n True, profile_academy=True, cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={\n 'cohort_id': model.cohort.id, 'user_id': model.user.id})\n data = {'educational_status': 'GRADUATED', 
'finantial_status': 'LATE'}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail':\n 'Cannot be marked as `GRADUATED` if its financial status is `LATE`'\n }\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-5": "\"\"\"\nTest /cohort/:id/user/:id\n\"\"\"\nimport re\nfrom unittest.mock import patch\nfrom django.urls.base import reverse_lazy\nfrom rest_framework import status\nfrom breathecode.tests.mocks import (\n GOOGLE_CLOUD_PATH,\n apply_google_cloud_client_mock,\n apply_google_cloud_bucket_mock,\n apply_google_cloud_blob_mock,\n)\nfrom ..mixins import AdmissionsTestCase\n\n\nclass CohortIdUserIdTestSuite(AdmissionsTestCase):\n \"\"\"Test /cohort/:id/user/:id\"\"\"\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_without_auth(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})\n response = self.client.get(url)\n json = response.json()\n\n self.assertEqual(\n json, {\n 'detail': 'Authentication credentials were not provided.',\n 'status_code': status.HTTP_401_UNAUTHORIZED\n })\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True)\n url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid cohort_id'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], 
apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': 999\n })\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'invalid user_id'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_bad_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_without_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {}\n response = self.client.put(url, data)\n 
json = response.json()\n expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id_but_with_user(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True, cohort=True, user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {}\n response = self.client.put(url, data)\n json = response.json()\n expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n model_dict = self.get_cohort_user_dict(1)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.put(url, data)\n json = response.json()\n expected = {\n 'id': model.cohort_user.id,\n 'role': model.cohort_user.role,\n 'educational_status': model.cohort_user.educational_status,\n 'finantial_status': model.cohort_user.finantial_status,\n }\n\n self.assertEqual(json, expected)\n 
self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(self.count_cohort_user(), 1)\n self.assertEqual(self.get_cohort_user_dict(1), model_dict)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': 9999\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': 9999,\n 'user_id': model.user.id\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_delete_with_id(self):\n \"\"\"Test /cohort/:id/user/:id 
without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n specialty_mode=True,\n profile_academy=True,\n cohort_user=True)\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {'specialty_mode': model.specialty_mode.id}\n response = self.client.delete(url, data)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n self.assertEqual(self.count_cohort_user(), 0)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_task(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n profile_academy=True,\n cohort_user=True,\n task=True,\n task_status='PENDING',\n task_type='PROJECT')\n url = reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {\n 'educational_status': 'GRADUATED',\n }\n response = self.client.put(url, data)\n json = response.json()\n expected = {\n 'status_code': 400,\n 'detail': 'User has tasks with status pending the educational status cannot be GRADUATED',\n }\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n\n @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())\n @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())\n @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())\n def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):\n \"\"\"Test /cohort/:id/user/:id without auth\"\"\"\n model = self.generate_models(authenticate=True,\n cohort=True,\n user=True,\n profile_academy=True,\n cohort_user=True)\n url = 
reverse_lazy('admissions:cohort_id_user_id',\n kwargs={\n 'cohort_id': model.cohort.id,\n 'user_id': model.user.id\n })\n data = {\n 'educational_status': 'GRADUATED',\n 'finantial_status': 'LATE',\n }\n response = self.client.put(url, data)\n json = response.json()\n expected = {\n 'status_code': 400,\n 'detail': 'Cannot be marked as `GRADUATED` if its financial status is `LATE`',\n }\n\n self.assertEqual(json, expected)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n",
"step-ids": [
9,
13,
14,
15,
16
]
}
|
[
9,
13,
14,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while i != -1:
i = s.find(st, j)
if k != i and i != -1:
k = i
sch1 += 1
j += 1
<|reserved_special_token_0|>
while i != -1:
i = s.find(st2, j)
if k != i and i != -1:
k = i
sch2 += 1
j += 1
print(sch1 + sch2)
<|reserved_special_token_1|>
s = input()
st = '>>-->'
st2 = '<--<<'
sch1 = sch2 = 0
i = 0
j = 0
k = -1
while i != -1:
i = s.find(st, j)
if k != i and i != -1:
k = i
sch1 += 1
j += 1
j = 0
i = 0
k = -1
while i != -1:
i = s.find(st2, j)
if k != i and i != -1:
k = i
sch2 += 1
j += 1
print(sch1 + sch2)
<|reserved_special_token_1|>
s = input()
st = '>>-->'
st2 = '<--<<'
sch1 = sch2 = 0
i = 0
j = 0
k = -1
while i != -1:
i = s.find(st, j)
if (k != i) and (i != -1):
k = i
sch1 += 1
j += 1
j = 0
i = 0
k = -1
while i != -1:
i = s.find(st2, j)
if (k != i) and (i != -1):
k = i
sch2 += 1
j += 1
print(sch1+sch2)
|
flexible
|
{
"blob_id": "c18e452592d53f22858f2307c60aa997b809c3c3",
"index": 4356,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile i != -1:\n i = s.find(st, j)\n if k != i and i != -1:\n k = i\n sch1 += 1\n j += 1\n<mask token>\nwhile i != -1:\n i = s.find(st2, j)\n if k != i and i != -1:\n k = i\n sch2 += 1\n j += 1\nprint(sch1 + sch2)\n",
"step-3": "s = input()\nst = '>>-->'\nst2 = '<--<<'\nsch1 = sch2 = 0\ni = 0\nj = 0\nk = -1\nwhile i != -1:\n i = s.find(st, j)\n if k != i and i != -1:\n k = i\n sch1 += 1\n j += 1\nj = 0\ni = 0\nk = -1\nwhile i != -1:\n i = s.find(st2, j)\n if k != i and i != -1:\n k = i\n sch2 += 1\n j += 1\nprint(sch1 + sch2)\n",
"step-4": "s = input()\nst = '>>-->'\nst2 = '<--<<'\nsch1 = sch2 = 0\ni = 0\nj = 0\nk = -1\nwhile i != -1:\n i = s.find(st, j)\n if (k != i) and (i != -1):\n k = i\n sch1 += 1\n j += 1\nj = 0\ni = 0\nk = -1\nwhile i != -1:\n i = s.find(st2, j)\n if (k != i) and (i != -1):\n k = i\n sch2 += 1\n j += 1\nprint(sch1+sch2)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.models import User
from .models import Museo, Distrito, Comentario, Favorito, Like, Titulo, Letra, Color
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import authenticate, login
from django.contrib.auth import logout
from web.parser import parseXML
import operator
from django.template.loader import get_template
from django.template import Context
import datetime
def getMuseums():
    """Return a dict mapping each museum's ID_ENTIDAD to its comment count."""
    return {m.ID_ENTIDAD: m.comentario_set.count() for m in Museo.objects.all()}
def getAccessibleMuseums():
    """Return {ID_ENTIDAD: comment count} restricted to accessible museums.

    A museum counts as accessible when its ACCESIBILIDAD field equals '1'.
    """
    return {m.ID_ENTIDAD: m.comentario_set.count()
            for m in Museo.objects.all() if m.ACCESIBILIDAD == '1'}
def getRanking():
    """Return (museum_id, comment_count) pairs, most commented first."""
    ascending = sorted(getMuseums().items(), key=operator.itemgetter(1))
    # Reversed copy of the ascending sort == original sort-then-reverse order.
    return ascending[::-1]
def getAccessibleRanking():
    """Return (museum_id, comment_count) pairs for accessible museums, most commented first."""
    ascending = sorted(getAccessibleMuseums().items(), key=operator.itemgetter(1))
    # Reversed copy of the ascending sort == original sort-then-reverse order.
    return ascending[::-1]
def _museum_marker_js(museum):
    """Return the Google-Maps JS that creates one museum's info window, marker
    and click handler. Output is byte-identical to the inline concatenation
    this code previously duplicated in both branches of mainPage."""
    return ("var " + "X" + museum.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
            "content:'<h1>" + museum.NOMBRE + "</h1>'});" +
            "var " + "X" + museum.ID_ENTIDAD + "marker = new google.maps.Marker({" +
            "position: {lat: " + museum.LATITUD + ", lng: " + museum.LONGITUD + " },map: map});" +
            "X" + museum.ID_ENTIDAD + "marker.addListener('click', function() {" +
            "X" + museum.ID_ENTIDAD + "info.open(map," + "X" + museum.ID_ENTIDAD + "marker);" +
            "});")


def _museum_card_html(museum):
    """Return the HTML card (title + counters, address, detail link) shown for
    one museum on the landing page."""
    card = "<center><a class='titulos' href=" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'
    card = card + "<a class='direccion'>" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'
    card = card + "<a class='info' href=" + "/museos/" + museum.ID_ENTIDAD + '/>Más información</a></center></br></br>'
    return card


@csrf_exempt
def mainPage(request):
    """Render the landing page with the five most-commented museums.

    GET, or POST with accion='mostrar' -> rank all museums.
    POST with accion='ocultar'         -> rank only accessible museums.
    Any other POST leaves the body empty (same as before).

    Fixes over the previous version:
    - indexing ``ranking[item]`` for item in range(5) raised IndexError when
      fewer than five museums existed; now a ``ranking[:5]`` slice is used.
    - the accessible branch compared LATITUD against the misspelled sentinel
      'No disponbile', so museums without coordinates produced broken JS
      markers; the comparison now uses 'No disponible' like everywhere else.
    """
    template = get_template('index.html')
    body = '<br>'  # renamed from ``list`` to stop shadowing the builtin
    markers = ''
    # Decide which ranking to show and which toggle button/message to render.
    if request.method == 'GET' or (request.method == 'POST' and request.POST['accion'] == 'mostrar'):
        ranking = getRanking()
        toggle_action, toggle_label = 'ocultar', 'Mostrar museos accesibles'
        empty_message = 'No hay museos con comentarios, ¡sé el primero en comentar!'
    elif request.method == 'POST' and request.POST['accion'] == 'ocultar':
        ranking = getAccessibleRanking()
        toggle_action, toggle_label = 'mostrar', 'Mostrar todos los museos'
        empty_message = 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'
    else:
        ranking = None  # unexpected POST action: keep the body empty
    if ranking is not None:
        body = (body + "<center><form action='/' method='post'><input type='hidden' name='accion' value='" + toggle_action + "'>" +
                "<input class='desplegable' type='submit' value='" + toggle_label + "'></form></center><div id='scroll'>")
        if len(ranking) > 0:
            # Show at most the five most-commented museums; skip zero-comment entries.
            for museum_id, n_comments in ranking[:5]:
                if n_comments != 0:
                    museum = Museo.objects.get(ID_ENTIDAD=museum_id)
                    body = body + _museum_card_html(museum)
                    # 'No disponible' is the sentinel the data uses for a
                    # missing coordinate; such museums get no map marker.
                    if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':
                        markers = markers + _museum_marker_js(museum)
            if ranking[0][1] == 0:
                # Ranking exists but nobody has commented yet.
                body = body + "<a class='titulos'><center>" + empty_message + '</center></a></br></br></div>'
            else:
                body = body + '</div>'
                body = body + "<center><a class='info' href='/xml'>XML de la página</a></center>"
        else:
            body = body + "<a class='titulos'><center>" + empty_message + '</center></a></br></br></div>'
    # Per-user look and feel: stored background colour / font size, with
    # defaults when the user never customised them.
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario=request.user).color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario=request.user).letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
                 "color: #444444;"
                 "font-size: " + letra + "pt;"
                 "background-color: #" + color + ";}")
    else:
        login = 0
    # Sidebar listing of every user's personal page, using their stored page
    # title when one exists.
    userList = ''
    for user in User.objects.all():
        try:
            title = Titulo.objects.get(usuario=user.username)
            userList = userList + "<li><a href='/" + user.username + "'>" + title.titulo + ' - ' + user.username + "</a></li></br>"
        except Titulo.DoesNotExist:
            userList = userList + "<li><a href='/" + user.username + "'>Página de " + user.username + "</a></li></br>"
    return HttpResponse(template.render(Context({'body': body, 'login': login, 'user': request.user, 'userList': userList, 'formato': style, 'markers': markers})))
@csrf_exempt
def museumsPage(request):
    """List museums, optionally filtered by district.

    GET lists every museum; POST expects a 'distrito' field and lists only
    the museums of that district.  Builds the HTML body fragment plus the
    Google Maps JavaScript snippets that create one marker per museum.
    """
    template = get_template('museos.html')
    if request.method == 'GET':
        museos = Museo.objects.all()
    elif request.method == 'POST':
        # Filter through the district's reverse relation.
        distrito = Distrito.objects.get(nombre = request.POST['distrito'])
        museos = distrito.museo_set.all()
    list = ''     # HTML fragment: one name + "more info" link per museum
    markers = ''  # JS fragment: InfoWindow + Marker + click listener per museum
    i = 1  # NOTE(review): appears unused — confirm before removing
    for museo in museos:
        list = list + "<center><a class='titulos'>" + museo.NOMBRE + '</a></br>'
        list = list + "<a class='info' href=" + "/museos/" + museo.ID_ENTIDAD + '/>Más información</a></center></br></br>'
        # Only museums with usable coordinates get a map marker.
        if museo.LATITUD != 'No disponible' and museo.LONGITUD != 'No disponible':
            markers = (markers +
                "var " + "X" + museo.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
                "content:'<h1>" + museo.NOMBRE + "</h1>'});" +
                "var " + "X" + museo.ID_ENTIDAD + "marker = new google.maps.Marker({" +
                "position: {lat: " + museo.LATITUD + ", lng: " + museo.LONGITUD + " },map: map});" +
                "X" + museo.ID_ENTIDAD + "marker.addListener('click', function() {" +
                "X" + museo.ID_ENTIDAD + "info.open(map," + "X" + museo.ID_ENTIDAD + "marker);" +
                "});")
    # Per-user theme (font size and background colour), defaults when unset.
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
            "color: #444444;"
            "font-size: " + letra + "pt;"
            "background-color: #" + color + ";}")
    else:
        login = 0
    # <option> entries for the district filter dropdown.
    distritos = Distrito.objects.all()
    districtList = ''
    for distrito in distritos:
        districtList = districtList + "<option value='" + distrito.nombre + "'>" + distrito.nombre + "</option>"
    return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'districtList': districtList, 'formato': style, 'markers': markers})))
@csrf_exempt
def museumPage(request, museumID):
    """Detail page for one museum, identified by its ID_ENTIDAD.

    GET renders the museum's data, its comments and a map marker.  POST
    performs one action, selected by the submitted field name:
      - 'comentario': store a new comment for this museum
      - 'añadir' / 'quitar': add / remove the museum from the user's favourites
      - 'mas' / 'menos': register / withdraw the user's like
    """
    template = get_template('museo.html')
    museum = Museo.objects.get(ID_ENTIDAD = museumID)
    if request.method == 'POST' and 'comentario' in request.POST:
        comment = Comentario(texto = request.POST['comentario'], museo = museum, usuario = request.user.username)
        comment.save()
    elif request.method == 'POST' and 'añadir' in request.POST:
        fav = Favorito(museo = museum, usuario = request.user)
        fav.save()
    elif request.method == 'POST' and 'quitar' in request.POST:
        Favorito.objects.filter(museo = museum, usuario = request.user).delete()
    elif request.method == 'POST' and 'mas' in request.POST:
        like = Like(museo = museum, usuario = request.user)
        like.save()
    elif request.method == 'POST' and 'menos' in request.POST:
        Like.objects.filter(museo = museum, usuario = request.user).delete()
    comments = museum.comentario_set.all()
    # Static part of the page body: the museum's attributes, section by section.
    message = ("<center><b><a class='titulos_museo'>" + museum.NOMBRE + "</a></b></center><div id='scroll'></br>"
        "<center><b><a class='titulos_museo'>Descripción</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.DESCRIPCION_ENTIDAD + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Horario</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.HORARIO + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Accesibilidad</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.ACCESIBILIDAD + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Dirección</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a><center></br>'
        "<center><a class='texto_museo'>Barrio: " + museum.BARRIO + '</a></center></br>'
        "<center><a class='texto_museo'>Distrito: " + str(museum.DISTRITO) + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Datos de contacto</a></b></center></br>"
        "<center><a class='texto_museo'>Teléfono: " + museum.TELEFONO + '</a></center></br>'
        "<center><a class='texto_museo'>Email: " + museum.EMAIL + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Comentarios</a></b></center></br>")
    allComments = ''
    for comment in comments:
        # NOTE(review): the author is always rendered as 'Anónimo' even though
        # comment.usuario stores the username — confirm this is intentional.
        # The +2h shift presumably converts stored UTC to local time — verify.
        allComments = allComments + "<center><a class='texto_museo'><b>" + 'Anónimo</b>: ' + comment.texto + ', ' + (datetime.timedelta(hours=2) + comment.fecha).strftime("%H:%M:%S %d-%m-%Y") + '</a></center></br>'
    message = message + allComments
    # Per-user theme plus the favourite/like toggle buttons (state-dependent).
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            favorito = Favorito.objects.get(museo = museum, usuario = request.user)
            favoriteButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='quitar' value='fav'>" +
                "<input class='desplegable' type='submit' value='Quitar de favoritos'></form></center>")
        except Favorito.DoesNotExist:
            favoriteButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='añadir' value='fav'>" +
                "<input class='desplegable' type='submit' value='Añadir a favoritos'></form></center>")
        try:
            like = Like.objects.get(museo = museum, usuario = request.user)
            likeButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='menos' value='like'>" +
                "<input class='desplegable' type='submit' value='Dislike'></form></center>")
        except Like.DoesNotExist:
            likeButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='mas' value='like'>" +
                "<input class='desplegable' type='submit' value='Like'></form></center>")
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
            "color: #444444;"
            "font-size: " + letra + "pt;"
            "background-color: #" + color + ";}")
    else:
        login = 0
        favoriteButton = ''
        likeButton = ''
    # Bug fix: the placeholder was misspelled 'No disponbile', so museums with
    # LATITUD == 'No disponible' wrongly produced a marker with invalid coords.
    if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':
        marker = ("var " + "X" + museum.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
            "content:'<h1>" + museum.NOMBRE + "</h1>'});" +
            "var " + "X" + museum.ID_ENTIDAD + "marker = new google.maps.Marker({" +
            "position: {lat: " + museum.LATITUD + ", lng: " + museum.LONGITUD + " },map: map});" +
            "X" + museum.ID_ENTIDAD + "marker.addListener('click', function() {" +
            "X" + museum.ID_ENTIDAD + "info.open(map," + "X" + museum.ID_ENTIDAD + "marker);" +
            "});")
    else:
        marker = ''
    return HttpResponse(template.render(Context({'body': message, 'login': login, 'user': request.user, 'id': museumID, 'fav': favoriteButton, 'like': likeButton, 'formato': style, 'marker': marker})))
@csrf_exempt
def loginPage(request):
    """Process the login/registration form, then re-render the main page.

    A POST with a 'login' field authenticates an existing account; a POST
    with a 'registro' field either logs in an existing account or creates
    a new one.  Authenticated users' submissions are ignored.
    """
    is_form_post = request.method == 'POST' and not request.user.is_authenticated()
    if is_form_post and 'login' in request.POST:
        account = authenticate(username=request.POST['Usuario'],
                               password=request.POST['Contraseña'])
        if account is not None:
            login(request, account)
    elif is_form_post and 'registro' in request.POST:
        name = request.POST['Usuario']
        secret = request.POST['Contraseña']
        try:
            User.objects.get(username=name)
            # Account already exists: treat the registration as a login attempt.
            account = authenticate(username=name, password=secret)
            if account is not None:
                login(request, account)
        except User.DoesNotExist:
            fresh = User.objects.create_user(username=name, password=secret)
            fresh.save()
    # Render the landing page as a plain GET regardless of what happened.
    request.method = 'GET'
    return mainPage(request)
def logoutPage(request):
    """End the current session and fall back to the landing page."""
    logout(request)
    return mainPage(request)
def userPage(request, user, number):
    """Personal page for *user*: their favourite museums, 5 per page.

    *number* is the 1-based page number (None defaults to page 1).  The full
    favourites list is rendered into one big HTML string with ';' inserted
    after every 5th entry; splitting on ';' then yields the page fragments.
    """
    if number == None:
        number = 1
    template = get_template('personal.html')
    listTotal = ''  # HTML for ALL favourites, ';'-delimited into pages of 5
    favoritos = Favorito.objects.filter(usuario = user)
    group = range(5)  # NOTE(review): overwritten below before use — likely dead
    count = 0;
    markers = ''  # JS map-marker snippets for every favourite with coordinates
    for favorito in favoritos:
        count = count + 1;
        museum = Museo.objects.get(NOMBRE = favorito.museo)
        listTotal = listTotal + "<a class='titulos' href=" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'
        listTotal = listTotal + "<a class='direccion'>" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'
        # The +2h shift presumably converts stored UTC to local time — verify.
        listTotal = listTotal + "<a class='info' href=" + "/museos/" + museum.ID_ENTIDAD + '/>Más información</a> <b>Fecha de guardado:' + (datetime.timedelta(hours=2) + favorito.fecha).strftime("%H:%M:%S %d-%m-%Y") + '</b></br></br></br>'
        if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':
            markers = (markers +
                "var " + "X" + museum.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
                "content:'<h1>" + museum.NOMBRE + "</h1>'});" +
                "var " + "X" + museum.ID_ENTIDAD + "marker = new google.maps.Marker({" +
                "position: {lat: " + museum.LATITUD + ", lng: " + museum.LONGITUD + " },map: map});" +
                "X" + museum.ID_ENTIDAD + "marker.addListener('click', function() {" +
                "X" + museum.ID_ENTIDAD + "info.open(map," + "X" + museum.ID_ENTIDAD + "marker);" +
                "});")
        # Page separator after every 5th favourite.
        if (count % 5) == 0:
            listTotal = listTotal + ';'
    # Select the requested page's HTML fragment (a string; iterated char-wise below).
    group = listTotal.split(';')[int(number) - 1]
    list = ''
    # Total number of pages, rounding up.
    if (favoritos.count() % 5) == 0:
        pages = int(favoritos.count() / 5)
    else:
        pages = int(favoritos.count() / 5) + 1
    pagesRange = range(pages)
    # Pagination bar: « prev, numbered links (current one highlighted), next ».
    if pages > 1:
        list = '<br>'
        if int(number) > 1:
            list = list + "<center><div class='pagination'><a href='/" + user + "/" + str(int(number) - 1) + "'>«</a>"
        else:
            list = list + "<center><div class='pagination'><a href='/" + user + "/" + str(number) + "'>«</a>"
        for page in pagesRange:
            if page == (int(number) - 1):
                list = list + "<a class='active' href='/" + user + "/" + str(page + 1) + "'>" + str(page + 1) + "</a>"
            else:
                list = list + "<a href='/" + user + "/" + str(page + 1) + "'>" + str(page + 1) + "</a>"
        if int(number) == pages:
            list = list + "<a href='/" + user + "/" + str(number) + "'>»</a></div></center></br>"
        else:
            list = list + "<a href='/" + user + "/" + str(int(number) + 1) + "'>»</a></div></center></br>"
    list = list + "<div id='scroll'><center>"
    # 'group' is a string, so this appends it character by character.
    for item in group:
        list = list + item
    # Empty-page fallbacks, different for anonymous vs. named profiles.
    if (list == '' or list == "<div id='scroll'><center>") and user != 'AnonymousUser':
        list = "<center><a class='titulos'>" + 'Para que aparezcan museos en esta página, ' + user + ' tiene que añadirlos.' + '</a></center></br></br>'
    elif (list == '' or list == "<div id='scroll'><center>") and user == 'AnonymousUser':
        list = "<center><a class='titulos'>" + 'Para ver tu página personal, primero tienes que loguearte.' + '</a></center></br></br>'
    else:
        list = list + "<center><a class='info' href='/" + user + "/xml'>XML del usuario</a></center>"
    list = list + '</center></div>'
    # Sidebar with links to every user's personal page.
    users = User.objects.all()
    userList = ''
    # NOTE(review): this loop rebinds the 'user' parameter; it happens after the
    # parameter's last use above, but renaming the loop variable would be safer.
    for user in users:
        try:
            title = Titulo.objects.get(usuario = user.username)
            userList = userList + "<li><a href='/" + user.username + "'>" + title.titulo + ' - ' + user.username + "</a></li></br>"
        except Titulo.DoesNotExist:
            userList = userList + "<li><a href='/" + user.username + "'>Página de " + user.username + "</a></li></br>"
    # Per-user theme (font size and background colour), defaults when unset.
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
            "color: #444444;"
            "font-size: " + letra + "pt;"
            "background-color: #" + color + ";}")
    else:
        login = 0
    return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'userList': userList, 'formato': style, 'markers': markers})))
def userXMLPage(request, user):
    """Serve the given user's favourite museums as an XML document."""
    template = get_template("personalXML.xml")
    favoriteList = [favorite.museo
                    for favorite in Favorito.objects.filter(usuario=user)]
    context = Context({'favoriteList': favoriteList, 'user': user})
    return HttpResponse(template.render(context), content_type="text/xml")
def XMLPage(request):
    """Serve the site-wide top-five museums (by comment count) as XML.

    Only museums with at least one comment are included.
    """
    template = get_template("personalXML.xml")
    user = ''
    topList = []
    topMuseums = getRanking()
    # Cap the slice at the ranking's length: the previous fixed range(5)
    # raised IndexError whenever fewer than five museums existed.
    for item in range(min(5, len(topMuseums))):
        if topMuseums[item][1] != 0:
            museum = Museo.objects.get(ID_ENTIDAD = topMuseums[item][0])
            topList = topList + [museum]
    return HttpResponse(template.render(Context({'favoriteList': topList, 'user': user})), content_type = "text/xml")
def XMLAccesiblePage(request):
    """Serve the top-five *accessible* museums (by comment count) as XML.

    Only accessible museums with at least one comment are included.
    """
    template = get_template("personalXML.xml")
    user = ''
    topList = []
    topMuseums = getAccessibleRanking()
    # Cap the slice at the ranking's length: the previous fixed range(5)
    # raised IndexError whenever fewer than five accessible museums existed.
    for item in range(min(5, len(topMuseums))):
        if topMuseums[item][1] != 0:
            museum = Museo.objects.get(ID_ENTIDAD = topMuseums[item][0])
            topList = topList + [museum]
    return HttpResponse(template.render(Context({'favoriteList': topList, 'user': user})), content_type = "text/xml")
@csrf_exempt
def preferencesPage(request, user):
    """Preferences page: save per-user colour, font size or page title.

    POST stores exactly one setting, selected by the submitted field name
    ('color', 'tamaño' or 'título'); each branch updates the existing row
    or creates it on first use.  GET just renders the form.
    """
    template = get_template("preferencias.html")
    if request.method == 'POST':
        if 'color' in request.POST:
            try:
                color = Color.objects.get(usuario = user)
                color.color = request.POST['color']
            except Color.DoesNotExist:
                color = Color(usuario = user, color = request.POST['color'])
            color.save()  # persists both the updated and the freshly created row
        elif 'tamaño' in request.POST:
            try:
                size = Letra.objects.get(usuario = user)
                size.letra = request.POST['tamaño']
            except Letra.DoesNotExist:
                size = Letra(usuario = user, letra = request.POST['tamaño'])
            size.save()
        elif 'título' in request.POST:
            try:
                title = Titulo.objects.get(usuario = user)
                title.titulo = request.POST['título']
            except Titulo.DoesNotExist:
                title = Titulo(usuario = user, titulo = request.POST['título'])
            title.save()
    # Per-user theme (font size and background colour), defaults when unset.
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
            "color: #444444;"
            "font-size: " + letra + "pt;"
            "background-color: #" + color + ";}")
    else:
        login = 0
    return HttpResponse(template.render(Context({'login': login, 'user': user, 'formato': style})))
def aboutPage(request):
    """Render the static 'about' page, applying the viewer's saved theme."""
    template = get_template('about.html')
    style = ''
    login = 0
    if request.user.is_authenticated():
        login = 1
        # Fall back to the defaults when the user never saved a preference.
        try:
            background = Color.objects.get(usuario=request.user).color
        except Color.DoesNotExist:
            background = 'EEF4F8'
        try:
            font_size = Letra.objects.get(usuario=request.user).letra
        except Letra.DoesNotExist:
            font_size = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
                 "color: #444444;"
                 "font-size: " + font_size + "pt;"
                 "background-color: #" + background + ";}")
    context = Context({'login': login, 'user': request.user, 'formato': style})
    return HttpResponse(template.render(context))
def _museum_field(data, key):
    """Return data[key], or the placeholder 'No disponible' when absent."""
    try:
        return data[key]
    except KeyError:
        return 'No disponible'


def updateDB(request):
    """Import museums from 'web/museos.xml' into the database.

    First pass creates any Distrito rows referenced by the XML that do not
    exist yet; second pass inserts a Museo row for every entry whose
    ID-ENTIDAD is not already stored.  Missing XML fields default to the
    placeholder 'No disponible'.  Finally re-renders the main page.
    """
    #Museo.objects.all().delete()
    museos = parseXML('web/museos.xml')
    # First pass: make sure every referenced district exists.
    # NOTE(review): an entry without a 'DISTRITO' key would raise KeyError
    # here, as in the original code — confirm the XML always carries it.
    for museo in museos:
        try:
            distrito = Distrito.objects.get(nombre = museos[museo]['DISTRITO'])
        except Distrito.DoesNotExist:
            distrito = Distrito(nombre = museos[museo]['DISTRITO'])
            distrito.save()
    # Second pass: insert museums that are not in the database yet.  The 23
    # copy-pasted try/except KeyError blocks are collapsed into _museum_field.
    for museo in museos:
        data = museos[museo]
        # The district is looked up as a model instance; a missing key keeps
        # the original behaviour of storing the placeholder string instead.
        try:
            distrito = Distrito.objects.get(nombre = data['DISTRITO'])
        except KeyError:
            distrito = 'No disponible'
        entity_id = _museum_field(data, 'ID-ENTIDAD')
        try:
            Museo.objects.get(ID_ENTIDAD = entity_id)
        except Museo.DoesNotExist:
            nuevoMuseo = Museo(
                ID_ENTIDAD = entity_id,
                NOMBRE = _museum_field(data, 'NOMBRE'),
                DESCRIPCION_ENTIDAD = _museum_field(data, 'DESCRIPCION-ENTIDAD'),
                HORARIO = _museum_field(data, 'HORARIO'),
                TRANSPORTE = _museum_field(data, 'TRANSPORTE'),
                ACCESIBILIDAD = _museum_field(data, 'ACCESIBILIDAD'),
                CONTENT_URL = _museum_field(data, 'CONTENT-URL'),
                NOMBRE_VIA = _museum_field(data, 'NOMBRE-VIA'),
                CLASE_VIAL = _museum_field(data, 'CLASE-VIAL'),
                TIPO_NUM = _museum_field(data, 'TIPO-NUM'),
                NUM = _museum_field(data, 'NUM'),
                LOCALIDAD = _museum_field(data, 'LOCALIDAD'),
                PROVINCIA = _museum_field(data, 'PROVINCIA'),
                CODIGO_POSTAL = _museum_field(data, 'CODIGO-POSTAL'),
                BARRIO = _museum_field(data, 'BARRIO'),
                DISTRITO = distrito,
                COORDENADA_X = _museum_field(data, 'COORDENADA-X'),
                COORDENADA_Y = _museum_field(data, 'COORDENADA-Y'),
                LATITUD = _museum_field(data, 'LATITUD'),
                LONGITUD = _museum_field(data, 'LONGITUD'),
                TELEFONO = _museum_field(data, 'TELEFONO'),
                FAX = _museum_field(data, 'FAX'),
                EMAIL = _museum_field(data, 'EMAIL'),
                TIPO = _museum_field(data, 'TIPO'))
            nuevoMuseo.save()
    return mainPage(request)
|
normal
|
{
"blob_id": "8b2911586e21162bec074732216c410c591f18a8",
"index": 6018,
"step-1": "<mask token>\n\n\ndef getMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getAccessibleMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n if museo.ACCESIBILIDAD == '1':\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getRanking():\n allMuseums = getMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\ndef getAccessibleRanking():\n allMuseums = getAccessibleMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\n@csrf_exempt\ndef mainPage(request):\n template = get_template('index.html')\n topFive = range(5)\n list = '<br>'\n markers = ''\n if request.method == 'GET' or request.method == 'POST' and request.POST[\n 'accion'] == 'mostrar':\n ranking = getRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='ocultar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar museos accesibles'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponible' and museum.\n LONGITUD != 'No disponible'):\n 
markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!' +\n '</center></a></br></br></div>')\n elif request.method == 'POST' and request.POST['accion'] == 'ocultar':\n ranking = getAccessibleRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='mostrar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar todos los museos'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponbile' 
and museum.\n LONGITUD != 'No disponible'):\n markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario=user.username)\n userList = (userList + \"<li><a href='/\" + user.username + \"'>\" +\n title.titulo + ' - ' + user.username + '</a></li></br>')\n except Titulo.DoesNotExist:\n userList = (userList + \"<li><a href='/\" + user.username +\n \"'>Página de \" + user.username + '</a></li></br>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 
'userList': userList, 'formato': style,\n 'markers': markers})))\n\n\n@csrf_exempt\ndef museumsPage(request):\n template = get_template('museos.html')\n if request.method == 'GET':\n museos = Museo.objects.all()\n elif request.method == 'POST':\n distrito = Distrito.objects.get(nombre=request.POST['distrito'])\n museos = distrito.museo_set.all()\n list = ''\n markers = ''\n i = 1\n for museo in museos:\n list = (list + \"<center><a class='titulos'>\" + museo.NOMBRE +\n '</a></br>')\n list = (list + \"<a class='info' href=\" + '/museos/' + museo.\n ID_ENTIDAD + '/>Más información</a></center></br></br>')\n if (museo.LATITUD != 'No disponible' and museo.LONGITUD !=\n 'No disponible'):\n markers = (markers + 'var ' + 'X' + museo.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museo.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museo.\n ID_ENTIDAD + 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museo.LATITUD + ', lng: ' + museo.\n LONGITUD + ' },map: map});' + 'X' + museo.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museo.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museo.ID_ENTIDAD +\n 'marker);' + '});')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n distritos = Distrito.objects.all()\n districtList = ''\n for distrito in distritos:\n districtList = (districtList + \"<option value='\" + distrito.nombre +\n \"'>\" + distrito.nombre + '</option>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'districtList': districtList,\n 'formato': 
style, 'markers': markers})))\n\n\n<mask token>\n\n\n@csrf_exempt\ndef loginPage(request):\n if request.method == 'POST':\n if not request.user.is_authenticated() and 'login' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n elif not request.user.is_authenticated(\n ) and 'registro' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n try:\n user = User.objects.get(username=username)\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n except User.DoesNotExist:\n user = User.objects.create_user(username=username, password\n =password)\n user.save()\n request.method = 'GET'\n return mainPage(request)\n\n\n<mask token>\n\n\ndef XMLAccesiblePage(request):\n template = get_template('personalXML.xml')\n user = ''\n topList = []\n topMuseums = getAccessibleRanking()\n topFive = range(5)\n for item in topFive:\n if topMuseums[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=topMuseums[item][0])\n topList = topList + [museum]\n return HttpResponse(template.render(Context({'favoriteList': topList,\n 'user': user})), content_type='text/xml')\n\n\n<mask token>\n\n\ndef aboutPage(request):\n template = get_template('about.html')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': login, 'user':\n request.user, 'formato': style})))\n\n\ndef 
updateDB(request):\n museos = parseXML('web/museos.xml')\n for museo in museos:\n try:\n distrito = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except Distrito.DoesNotExist:\n distrito = Distrito(nombre=museos[museo]['DISTRITO'])\n distrito.save()\n for museo in museos:\n try:\n A = museos[museo]['ID-ENTIDAD']\n except KeyError:\n A = 'No disponible'\n try:\n B = museos[museo]['NOMBRE']\n except KeyError:\n B = 'No disponible'\n try:\n C = museos[museo]['DESCRIPCION-ENTIDAD']\n except KeyError:\n C = 'No disponible'\n try:\n D = museos[museo]['HORARIO']\n except KeyError:\n D = 'No disponible'\n try:\n E = museos[museo]['TRANSPORTE']\n except KeyError:\n E = 'No disponible'\n try:\n F = museos[museo]['ACCESIBILIDAD']\n except KeyError:\n F = 'No disponible'\n try:\n G = museos[museo]['CONTENT-URL']\n except KeyError:\n G = 'No disponible'\n try:\n H = museos[museo]['NOMBRE-VIA']\n except KeyError:\n H = 'No disponible'\n try:\n I = museos[museo]['CLASE-VIAL']\n except KeyError:\n I = 'No disponible'\n try:\n J = museos[museo]['TIPO-NUM']\n except KeyError:\n J = 'No disponible'\n try:\n K = museos[museo]['NUM']\n except KeyError:\n K = 'No disponible'\n try:\n L = museos[museo]['LOCALIDAD']\n except KeyError:\n L = 'No disponible'\n try:\n M = museos[museo]['PROVINCIA']\n except KeyError:\n M = 'No disponible'\n try:\n N = museos[museo]['CODIGO-POSTAL']\n except KeyError:\n N = 'No disponible'\n try:\n Ñ = museos[museo]['BARRIO']\n except KeyError:\n Ñ = 'No disponible'\n try:\n O = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except KeyError:\n O = 'No disponible'\n try:\n P = museos[museo]['COORDENADA-X']\n except KeyError:\n P = 'No disponible'\n try:\n Q = museos[museo]['COORDENADA-Y']\n except KeyError:\n Q = 'No disponible'\n try:\n R = museos[museo]['LATITUD']\n except KeyError:\n R = 'No disponible'\n try:\n S = museos[museo]['LONGITUD']\n except KeyError:\n S = 'No disponible'\n try:\n T = museos[museo]['TELEFONO']\n except 
KeyError:\n T = 'No disponible'\n try:\n U = museos[museo]['FAX']\n except KeyError:\n U = 'No disponible'\n try:\n V = museos[museo]['EMAIL']\n except KeyError:\n V = 'No disponible'\n try:\n W = museos[museo]['TIPO']\n except KeyError:\n W = 'No disponible'\n try:\n viejoMuseo = Museo.objects.get(ID_ENTIDAD=A)\n except Museo.DoesNotExist:\n nuevoMuseo = Museo(ID_ENTIDAD=A, NOMBRE=B, DESCRIPCION_ENTIDAD=\n C, HORARIO=D, TRANSPORTE=E, ACCESIBILIDAD=F, CONTENT_URL=G,\n NOMBRE_VIA=H, CLASE_VIAL=I, TIPO_NUM=J, NUM=K, LOCALIDAD=L,\n PROVINCIA=M, CODIGO_POSTAL=N, BARRIO=Ñ, DISTRITO=O,\n COORDENADA_X=P, COORDENADA_Y=Q, LATITUD=R, LONGITUD=S,\n TELEFONO=T, FAX=U, EMAIL=V, TIPO=W)\n nuevoMuseo.save()\n return mainPage(request)\n",
"step-2": "<mask token>\n\n\ndef getMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getAccessibleMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n if museo.ACCESIBILIDAD == '1':\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getRanking():\n allMuseums = getMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\ndef getAccessibleRanking():\n allMuseums = getAccessibleMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\n@csrf_exempt\ndef mainPage(request):\n template = get_template('index.html')\n topFive = range(5)\n list = '<br>'\n markers = ''\n if request.method == 'GET' or request.method == 'POST' and request.POST[\n 'accion'] == 'mostrar':\n ranking = getRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='ocultar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar museos accesibles'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponible' and museum.\n LONGITUD != 'No disponible'):\n 
markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!' +\n '</center></a></br></br></div>')\n elif request.method == 'POST' and request.POST['accion'] == 'ocultar':\n ranking = getAccessibleRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='mostrar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar todos los museos'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponbile' 
and museum.\n LONGITUD != 'No disponible'):\n markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario=user.username)\n userList = (userList + \"<li><a href='/\" + user.username + \"'>\" +\n title.titulo + ' - ' + user.username + '</a></li></br>')\n except Titulo.DoesNotExist:\n userList = (userList + \"<li><a href='/\" + user.username +\n \"'>Página de \" + user.username + '</a></li></br>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 
'userList': userList, 'formato': style,\n 'markers': markers})))\n\n\n@csrf_exempt\ndef museumsPage(request):\n template = get_template('museos.html')\n if request.method == 'GET':\n museos = Museo.objects.all()\n elif request.method == 'POST':\n distrito = Distrito.objects.get(nombre=request.POST['distrito'])\n museos = distrito.museo_set.all()\n list = ''\n markers = ''\n i = 1\n for museo in museos:\n list = (list + \"<center><a class='titulos'>\" + museo.NOMBRE +\n '</a></br>')\n list = (list + \"<a class='info' href=\" + '/museos/' + museo.\n ID_ENTIDAD + '/>Más información</a></center></br></br>')\n if (museo.LATITUD != 'No disponible' and museo.LONGITUD !=\n 'No disponible'):\n markers = (markers + 'var ' + 'X' + museo.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museo.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museo.\n ID_ENTIDAD + 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museo.LATITUD + ', lng: ' + museo.\n LONGITUD + ' },map: map});' + 'X' + museo.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museo.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museo.ID_ENTIDAD +\n 'marker);' + '});')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n distritos = Distrito.objects.all()\n districtList = ''\n for distrito in distritos:\n districtList = (districtList + \"<option value='\" + distrito.nombre +\n \"'>\" + distrito.nombre + '</option>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'districtList': districtList,\n 'formato': 
style, 'markers': markers})))\n\n\n<mask token>\n\n\n@csrf_exempt\ndef loginPage(request):\n if request.method == 'POST':\n if not request.user.is_authenticated() and 'login' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n elif not request.user.is_authenticated(\n ) and 'registro' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n try:\n user = User.objects.get(username=username)\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n except User.DoesNotExist:\n user = User.objects.create_user(username=username, password\n =password)\n user.save()\n request.method = 'GET'\n return mainPage(request)\n\n\n<mask token>\n\n\ndef userPage(request, user, number):\n if number == None:\n number = 1\n template = get_template('personal.html')\n listTotal = ''\n favoritos = Favorito.objects.filter(usuario=user)\n group = range(5)\n count = 0\n markers = ''\n for favorito in favoritos:\n count = count + 1\n museum = Museo.objects.get(NOMBRE=favorito.museo)\n listTotal = (listTotal + \"<a class='titulos' href=\" + museum.\n CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.\n comentario_set.count()) + ' Comentarios - ' + str(museum.\n like_set.count()) + ' Likes</b></br></br>')\n listTotal = (listTotal + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM +\n ', ' + museum.LOCALIDAD + '</a></br></br>')\n listTotal = (listTotal + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a> <b>Fecha de guardado:' + (datetime.\n timedelta(hours=2) + favorito.fecha).strftime(\n '%H:%M:%S %d-%m-%Y') + '</b></br></br></br>')\n if (museum.LATITUD != 'No disponible' and museum.LONGITUD !=\n 'No disponible'):\n markers = (markers + 'var ' + 'X' + 
museum.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museum.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museum.\n ID_ENTIDAD + 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' + museum.\n LONGITUD + ' },map: map});' + 'X' + museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museum.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museum.ID_ENTIDAD +\n 'marker);' + '});')\n if count % 5 == 0:\n listTotal = listTotal + ';'\n group = listTotal.split(';')[int(number) - 1]\n list = ''\n if favoritos.count() % 5 == 0:\n pages = int(favoritos.count() / 5)\n else:\n pages = int(favoritos.count() / 5) + 1\n pagesRange = range(pages)\n if pages > 1:\n list = '<br>'\n if int(number) > 1:\n list = (list + \"<center><div class='pagination'><a href='/\" +\n user + '/' + str(int(number) - 1) + \"'>«</a>\")\n else:\n list = (list + \"<center><div class='pagination'><a href='/\" +\n user + '/' + str(number) + \"'>«</a>\")\n for page in pagesRange:\n if page == int(number) - 1:\n list = list + \"<a class='active' href='/\" + user + '/' + str(\n page + 1) + \"'>\" + str(page + 1) + '</a>'\n else:\n list = list + \"<a href='/\" + user + '/' + str(page + 1\n ) + \"'>\" + str(page + 1) + '</a>'\n if int(number) == pages:\n list = list + \"<a href='/\" + user + '/' + str(number\n ) + \"'>»</a></div></center></br>\"\n else:\n list = list + \"<a href='/\" + user + '/' + str(int(number) + 1\n ) + \"'>»</a></div></center></br>\"\n list = list + \"<div id='scroll'><center>\"\n for item in group:\n list = list + item\n if (list == '' or list == \"<div id='scroll'><center>\"\n ) and user != 'AnonymousUser':\n list = (\"<center><a class='titulos'>\" +\n 'Para que aparezcan museos en esta página, ' + user +\n ' tiene que añadirlos.' 
+ '</a></center></br></br>')\n elif (list == '' or list == \"<div id='scroll'><center>\"\n ) and user == 'AnonymousUser':\n list = (\"<center><a class='titulos'>\" +\n 'Para ver tu página personal, primero tienes que loguearte.' +\n '</a></center></br></br>')\n else:\n list = (list + \"<center><a class='info' href='/\" + user +\n \"/xml'>XML del usuario</a></center>\")\n list = list + '</center></div>'\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario=user.username)\n userList = (userList + \"<li><a href='/\" + user.username + \"'>\" +\n title.titulo + ' - ' + user.username + '</a></li></br>')\n except Titulo.DoesNotExist:\n userList = (userList + \"<li><a href='/\" + user.username +\n \"'>Página de \" + user.username + '</a></li></br>')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'userList': userList, 'formato': style,\n 'markers': markers})))\n\n\n<mask token>\n\n\ndef XMLAccesiblePage(request):\n template = get_template('personalXML.xml')\n user = ''\n topList = []\n topMuseums = getAccessibleRanking()\n topFive = range(5)\n for item in topFive:\n if topMuseums[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=topMuseums[item][0])\n topList = topList + [museum]\n return HttpResponse(template.render(Context({'favoriteList': topList,\n 'user': user})), content_type='text/xml')\n\n\n<mask token>\n\n\ndef aboutPage(request):\n template = get_template('about.html')\n style = ''\n if 
request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': login, 'user':\n request.user, 'formato': style})))\n\n\ndef updateDB(request):\n museos = parseXML('web/museos.xml')\n for museo in museos:\n try:\n distrito = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except Distrito.DoesNotExist:\n distrito = Distrito(nombre=museos[museo]['DISTRITO'])\n distrito.save()\n for museo in museos:\n try:\n A = museos[museo]['ID-ENTIDAD']\n except KeyError:\n A = 'No disponible'\n try:\n B = museos[museo]['NOMBRE']\n except KeyError:\n B = 'No disponible'\n try:\n C = museos[museo]['DESCRIPCION-ENTIDAD']\n except KeyError:\n C = 'No disponible'\n try:\n D = museos[museo]['HORARIO']\n except KeyError:\n D = 'No disponible'\n try:\n E = museos[museo]['TRANSPORTE']\n except KeyError:\n E = 'No disponible'\n try:\n F = museos[museo]['ACCESIBILIDAD']\n except KeyError:\n F = 'No disponible'\n try:\n G = museos[museo]['CONTENT-URL']\n except KeyError:\n G = 'No disponible'\n try:\n H = museos[museo]['NOMBRE-VIA']\n except KeyError:\n H = 'No disponible'\n try:\n I = museos[museo]['CLASE-VIAL']\n except KeyError:\n I = 'No disponible'\n try:\n J = museos[museo]['TIPO-NUM']\n except KeyError:\n J = 'No disponible'\n try:\n K = museos[museo]['NUM']\n except KeyError:\n K = 'No disponible'\n try:\n L = museos[museo]['LOCALIDAD']\n except KeyError:\n L = 'No disponible'\n try:\n M = museos[museo]['PROVINCIA']\n except KeyError:\n M = 'No disponible'\n try:\n N = museos[museo]['CODIGO-POSTAL']\n except KeyError:\n N = 'No 
disponible'\n try:\n Ñ = museos[museo]['BARRIO']\n except KeyError:\n Ñ = 'No disponible'\n try:\n O = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except KeyError:\n O = 'No disponible'\n try:\n P = museos[museo]['COORDENADA-X']\n except KeyError:\n P = 'No disponible'\n try:\n Q = museos[museo]['COORDENADA-Y']\n except KeyError:\n Q = 'No disponible'\n try:\n R = museos[museo]['LATITUD']\n except KeyError:\n R = 'No disponible'\n try:\n S = museos[museo]['LONGITUD']\n except KeyError:\n S = 'No disponible'\n try:\n T = museos[museo]['TELEFONO']\n except KeyError:\n T = 'No disponible'\n try:\n U = museos[museo]['FAX']\n except KeyError:\n U = 'No disponible'\n try:\n V = museos[museo]['EMAIL']\n except KeyError:\n V = 'No disponible'\n try:\n W = museos[museo]['TIPO']\n except KeyError:\n W = 'No disponible'\n try:\n viejoMuseo = Museo.objects.get(ID_ENTIDAD=A)\n except Museo.DoesNotExist:\n nuevoMuseo = Museo(ID_ENTIDAD=A, NOMBRE=B, DESCRIPCION_ENTIDAD=\n C, HORARIO=D, TRANSPORTE=E, ACCESIBILIDAD=F, CONTENT_URL=G,\n NOMBRE_VIA=H, CLASE_VIAL=I, TIPO_NUM=J, NUM=K, LOCALIDAD=L,\n PROVINCIA=M, CODIGO_POSTAL=N, BARRIO=Ñ, DISTRITO=O,\n COORDENADA_X=P, COORDENADA_Y=Q, LATITUD=R, LONGITUD=S,\n TELEFONO=T, FAX=U, EMAIL=V, TIPO=W)\n nuevoMuseo.save()\n return mainPage(request)\n",
"step-3": "<mask token>\n\n\ndef getMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getAccessibleMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n if museo.ACCESIBILIDAD == '1':\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getRanking():\n allMuseums = getMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\ndef getAccessibleRanking():\n allMuseums = getAccessibleMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\n@csrf_exempt\ndef mainPage(request):\n template = get_template('index.html')\n topFive = range(5)\n list = '<br>'\n markers = ''\n if request.method == 'GET' or request.method == 'POST' and request.POST[\n 'accion'] == 'mostrar':\n ranking = getRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='ocultar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar museos accesibles'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponible' and museum.\n LONGITUD != 'No disponible'):\n 
markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!' +\n '</center></a></br></br></div>')\n elif request.method == 'POST' and request.POST['accion'] == 'ocultar':\n ranking = getAccessibleRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='mostrar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar todos los museos'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponbile' 
and museum.\n LONGITUD != 'No disponible'):\n markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario=user.username)\n userList = (userList + \"<li><a href='/\" + user.username + \"'>\" +\n title.titulo + ' - ' + user.username + '</a></li></br>')\n except Titulo.DoesNotExist:\n userList = (userList + \"<li><a href='/\" + user.username +\n \"'>Página de \" + user.username + '</a></li></br>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 
'userList': userList, 'formato': style,\n 'markers': markers})))\n\n\n@csrf_exempt\ndef museumsPage(request):\n template = get_template('museos.html')\n if request.method == 'GET':\n museos = Museo.objects.all()\n elif request.method == 'POST':\n distrito = Distrito.objects.get(nombre=request.POST['distrito'])\n museos = distrito.museo_set.all()\n list = ''\n markers = ''\n i = 1\n for museo in museos:\n list = (list + \"<center><a class='titulos'>\" + museo.NOMBRE +\n '</a></br>')\n list = (list + \"<a class='info' href=\" + '/museos/' + museo.\n ID_ENTIDAD + '/>Más información</a></center></br></br>')\n if (museo.LATITUD != 'No disponible' and museo.LONGITUD !=\n 'No disponible'):\n markers = (markers + 'var ' + 'X' + museo.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museo.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museo.\n ID_ENTIDAD + 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museo.LATITUD + ', lng: ' + museo.\n LONGITUD + ' },map: map});' + 'X' + museo.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museo.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museo.ID_ENTIDAD +\n 'marker);' + '});')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n distritos = Distrito.objects.all()\n districtList = ''\n for distrito in distritos:\n districtList = (districtList + \"<option value='\" + distrito.nombre +\n \"'>\" + distrito.nombre + '</option>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'districtList': districtList,\n 'formato': 
style, 'markers': markers})))\n\n\n<mask token>\n\n\n@csrf_exempt\ndef loginPage(request):\n if request.method == 'POST':\n if not request.user.is_authenticated() and 'login' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n elif not request.user.is_authenticated(\n ) and 'registro' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n try:\n user = User.objects.get(username=username)\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n except User.DoesNotExist:\n user = User.objects.create_user(username=username, password\n =password)\n user.save()\n request.method = 'GET'\n return mainPage(request)\n\n\ndef logoutPage(request):\n logout(request)\n return mainPage(request)\n\n\ndef userPage(request, user, number):\n if number == None:\n number = 1\n template = get_template('personal.html')\n listTotal = ''\n favoritos = Favorito.objects.filter(usuario=user)\n group = range(5)\n count = 0\n markers = ''\n for favorito in favoritos:\n count = count + 1\n museum = Museo.objects.get(NOMBRE=favorito.museo)\n listTotal = (listTotal + \"<a class='titulos' href=\" + museum.\n CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.\n comentario_set.count()) + ' Comentarios - ' + str(museum.\n like_set.count()) + ' Likes</b></br></br>')\n listTotal = (listTotal + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM +\n ', ' + museum.LOCALIDAD + '</a></br></br>')\n listTotal = (listTotal + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a> <b>Fecha de guardado:' + (datetime.\n timedelta(hours=2) + favorito.fecha).strftime(\n '%H:%M:%S %d-%m-%Y') + '</b></br></br></br>')\n if (museum.LATITUD != 'No disponible' and museum.LONGITUD !=\n 'No 
disponible'):\n markers = (markers + 'var ' + 'X' + museum.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museum.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museum.\n ID_ENTIDAD + 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' + museum.\n LONGITUD + ' },map: map});' + 'X' + museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museum.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museum.ID_ENTIDAD +\n 'marker);' + '});')\n if count % 5 == 0:\n listTotal = listTotal + ';'\n group = listTotal.split(';')[int(number) - 1]\n list = ''\n if favoritos.count() % 5 == 0:\n pages = int(favoritos.count() / 5)\n else:\n pages = int(favoritos.count() / 5) + 1\n pagesRange = range(pages)\n if pages > 1:\n list = '<br>'\n if int(number) > 1:\n list = (list + \"<center><div class='pagination'><a href='/\" +\n user + '/' + str(int(number) - 1) + \"'>«</a>\")\n else:\n list = (list + \"<center><div class='pagination'><a href='/\" +\n user + '/' + str(number) + \"'>«</a>\")\n for page in pagesRange:\n if page == int(number) - 1:\n list = list + \"<a class='active' href='/\" + user + '/' + str(\n page + 1) + \"'>\" + str(page + 1) + '</a>'\n else:\n list = list + \"<a href='/\" + user + '/' + str(page + 1\n ) + \"'>\" + str(page + 1) + '</a>'\n if int(number) == pages:\n list = list + \"<a href='/\" + user + '/' + str(number\n ) + \"'>»</a></div></center></br>\"\n else:\n list = list + \"<a href='/\" + user + '/' + str(int(number) + 1\n ) + \"'>»</a></div></center></br>\"\n list = list + \"<div id='scroll'><center>\"\n for item in group:\n list = list + item\n if (list == '' or list == \"<div id='scroll'><center>\"\n ) and user != 'AnonymousUser':\n list = (\"<center><a class='titulos'>\" +\n 'Para que aparezcan museos en esta página, ' + user +\n ' tiene que añadirlos.' 
+ '</a></center></br></br>')\n elif (list == '' or list == \"<div id='scroll'><center>\"\n ) and user == 'AnonymousUser':\n list = (\"<center><a class='titulos'>\" +\n 'Para ver tu página personal, primero tienes que loguearte.' +\n '</a></center></br></br>')\n else:\n list = (list + \"<center><a class='info' href='/\" + user +\n \"/xml'>XML del usuario</a></center>\")\n list = list + '</center></div>'\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario=user.username)\n userList = (userList + \"<li><a href='/\" + user.username + \"'>\" +\n title.titulo + ' - ' + user.username + '</a></li></br>')\n except Titulo.DoesNotExist:\n userList = (userList + \"<li><a href='/\" + user.username +\n \"'>Página de \" + user.username + '</a></li></br>')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'userList': userList, 'formato': style,\n 'markers': markers})))\n\n\ndef userXMLPage(request, user):\n template = get_template('personalXML.xml')\n favoriteList = []\n favoriteMuseums = Favorito.objects.filter(usuario=user)\n for favorite in favoriteMuseums:\n favoriteList = favoriteList + [favorite.museo]\n return HttpResponse(template.render(Context({'favoriteList':\n favoriteList, 'user': user})), content_type='text/xml')\n\n\n<mask token>\n\n\ndef XMLAccesiblePage(request):\n template = get_template('personalXML.xml')\n user = ''\n topList = []\n topMuseums = getAccessibleRanking()\n topFive = range(5)\n 
for item in topFive:\n if topMuseums[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=topMuseums[item][0])\n topList = topList + [museum]\n return HttpResponse(template.render(Context({'favoriteList': topList,\n 'user': user})), content_type='text/xml')\n\n\n@csrf_exempt\ndef preferencesPage(request, user):\n template = get_template('preferencias.html')\n if request.method == 'POST':\n if 'color' in request.POST:\n try:\n color = Color.objects.get(usuario=user)\n color.color = request.POST['color']\n except Color.DoesNotExist:\n color = Color(usuario=user, color=request.POST['color'])\n color.save()\n elif 'tamaño' in request.POST:\n try:\n size = Letra.objects.get(usuario=user)\n size.letra = request.POST['tamaño']\n except Letra.DoesNotExist:\n size = Letra(usuario=user, letra=request.POST['tamaño'])\n size.save()\n elif 'título' in request.POST:\n try:\n title = Titulo.objects.get(usuario=user)\n title.titulo = request.POST['título']\n except Titulo.DoesNotExist:\n title = Titulo(usuario=user, titulo=request.POST['título'])\n title.save()\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': login, 'user':\n user, 'formato': style})))\n\n\ndef aboutPage(request):\n template = get_template('about.html')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except 
Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': login, 'user':\n request.user, 'formato': style})))\n\n\ndef updateDB(request):\n museos = parseXML('web/museos.xml')\n for museo in museos:\n try:\n distrito = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except Distrito.DoesNotExist:\n distrito = Distrito(nombre=museos[museo]['DISTRITO'])\n distrito.save()\n for museo in museos:\n try:\n A = museos[museo]['ID-ENTIDAD']\n except KeyError:\n A = 'No disponible'\n try:\n B = museos[museo]['NOMBRE']\n except KeyError:\n B = 'No disponible'\n try:\n C = museos[museo]['DESCRIPCION-ENTIDAD']\n except KeyError:\n C = 'No disponible'\n try:\n D = museos[museo]['HORARIO']\n except KeyError:\n D = 'No disponible'\n try:\n E = museos[museo]['TRANSPORTE']\n except KeyError:\n E = 'No disponible'\n try:\n F = museos[museo]['ACCESIBILIDAD']\n except KeyError:\n F = 'No disponible'\n try:\n G = museos[museo]['CONTENT-URL']\n except KeyError:\n G = 'No disponible'\n try:\n H = museos[museo]['NOMBRE-VIA']\n except KeyError:\n H = 'No disponible'\n try:\n I = museos[museo]['CLASE-VIAL']\n except KeyError:\n I = 'No disponible'\n try:\n J = museos[museo]['TIPO-NUM']\n except KeyError:\n J = 'No disponible'\n try:\n K = museos[museo]['NUM']\n except KeyError:\n K = 'No disponible'\n try:\n L = museos[museo]['LOCALIDAD']\n except KeyError:\n L = 'No disponible'\n try:\n M = museos[museo]['PROVINCIA']\n except KeyError:\n M = 'No disponible'\n try:\n N = museos[museo]['CODIGO-POSTAL']\n except KeyError:\n N = 'No disponible'\n try:\n Ñ = museos[museo]['BARRIO']\n except KeyError:\n Ñ = 'No disponible'\n try:\n O = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except KeyError:\n O = 'No disponible'\n try:\n P = museos[museo]['COORDENADA-X']\n except KeyError:\n P = 'No 
disponible'\n try:\n Q = museos[museo]['COORDENADA-Y']\n except KeyError:\n Q = 'No disponible'\n try:\n R = museos[museo]['LATITUD']\n except KeyError:\n R = 'No disponible'\n try:\n S = museos[museo]['LONGITUD']\n except KeyError:\n S = 'No disponible'\n try:\n T = museos[museo]['TELEFONO']\n except KeyError:\n T = 'No disponible'\n try:\n U = museos[museo]['FAX']\n except KeyError:\n U = 'No disponible'\n try:\n V = museos[museo]['EMAIL']\n except KeyError:\n V = 'No disponible'\n try:\n W = museos[museo]['TIPO']\n except KeyError:\n W = 'No disponible'\n try:\n viejoMuseo = Museo.objects.get(ID_ENTIDAD=A)\n except Museo.DoesNotExist:\n nuevoMuseo = Museo(ID_ENTIDAD=A, NOMBRE=B, DESCRIPCION_ENTIDAD=\n C, HORARIO=D, TRANSPORTE=E, ACCESIBILIDAD=F, CONTENT_URL=G,\n NOMBRE_VIA=H, CLASE_VIAL=I, TIPO_NUM=J, NUM=K, LOCALIDAD=L,\n PROVINCIA=M, CODIGO_POSTAL=N, BARRIO=Ñ, DISTRITO=O,\n COORDENADA_X=P, COORDENADA_Y=Q, LATITUD=R, LONGITUD=S,\n TELEFONO=T, FAX=U, EMAIL=V, TIPO=W)\n nuevoMuseo.save()\n return mainPage(request)\n",
"step-4": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom django.contrib.auth.models import User\nfrom .models import Museo, Distrito, Comentario, Favorito, Like, Titulo, Letra, Color\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.contrib.auth import authenticate, login\nfrom django.contrib.auth import logout\nfrom web.parser import parseXML\nimport operator\nfrom django.template.loader import get_template\nfrom django.template import Context\nimport datetime\n\n\ndef getMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getAccessibleMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n if museo.ACCESIBILIDAD == '1':\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\n\ndef getRanking():\n allMuseums = getMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\ndef getAccessibleRanking():\n allMuseums = getAccessibleMuseums()\n ranking = sorted(allMuseums.items(), key=operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n\n@csrf_exempt\ndef mainPage(request):\n template = get_template('index.html')\n topFive = range(5)\n list = '<br>'\n markers = ''\n if request.method == 'GET' or request.method == 'POST' and request.POST[\n 'accion'] == 'mostrar':\n ranking = getRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='ocultar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar museos accesibles'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + 
str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponible' and museum.\n LONGITUD != 'No disponible'):\n markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos con comentarios, ¡sé el primero en comentar!' 
+\n '</center></a></br></br></div>')\n elif request.method == 'POST' and request.POST['accion'] == 'ocultar':\n ranking = getAccessibleRanking()\n list = (list +\n \"<center><form action='/' method='post'><input type='hidden' name='accion' value='mostrar'>\"\n +\n \"<input class='desplegable' type='submit' value='Mostrar todos los museos'></form></center><div id='scroll'>\"\n )\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=ranking[item][0])\n list = (list + \"<center><a class='titulos' href=\" +\n museum.CONTENT_URL + '>' + museum.NOMBRE +\n '</a><br><b>' + str(museum.comentario_set.count()) +\n ' Comentarios - ' + str(museum.like_set.count()) +\n ' Likes</b></br></br>')\n list = (list + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' +\n museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n )\n list = (list + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a></center></br></br>')\n if (museum.LATITUD != 'No disponbile' and museum.\n LONGITUD != 'No disponible'):\n markers = (markers + 'var ' + 'X' + museum.\n ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' +\n museum.LONGITUD + ' },map: map});' + 'X' +\n museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" +\n 'X' + museum.ID_ENTIDAD + 'info.open(map,' +\n 'X' + museum.ID_ENTIDAD + 'marker);' + '});')\n if ranking[0][1] == 0:\n list = (list + \"<a class='titulos'><center>\" +\n 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n else:\n list = list + '</div>'\n list = (list +\n \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n )\n else:\n list = (list + \"<a class='titulos'><center>\" +\n 'No 
hay museos accesibles con comentarios, ¡sé el primero en comentar!'\n + '</center></a></br></br></div>')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario=user.username)\n userList = (userList + \"<li><a href='/\" + user.username + \"'>\" +\n title.titulo + ' - ' + user.username + '</a></li></br>')\n except Titulo.DoesNotExist:\n userList = (userList + \"<li><a href='/\" + user.username +\n \"'>Página de \" + user.username + '</a></li></br>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'userList': userList, 'formato': style,\n 'markers': markers})))\n\n\n@csrf_exempt\ndef museumsPage(request):\n template = get_template('museos.html')\n if request.method == 'GET':\n museos = Museo.objects.all()\n elif request.method == 'POST':\n distrito = Distrito.objects.get(nombre=request.POST['distrito'])\n museos = distrito.museo_set.all()\n list = ''\n markers = ''\n i = 1\n for museo in museos:\n list = (list + \"<center><a class='titulos'>\" + museo.NOMBRE +\n '</a></br>')\n list = (list + \"<a class='info' href=\" + '/museos/' + museo.\n ID_ENTIDAD + '/>Más información</a></center></br></br>')\n if (museo.LATITUD != 'No disponible' and museo.LONGITUD !=\n 'No disponible'):\n markers = (markers + 'var ' + 'X' + museo.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museo.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museo.\n ID_ENTIDAD + 'marker = new google.maps.Marker({' 
+\n 'position: {lat: ' + museo.LATITUD + ', lng: ' + museo.\n LONGITUD + ' },map: map});' + 'X' + museo.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museo.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museo.ID_ENTIDAD +\n 'marker);' + '});')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n distritos = Distrito.objects.all()\n districtList = ''\n for distrito in distritos:\n districtList = (districtList + \"<option value='\" + distrito.nombre +\n \"'>\" + distrito.nombre + '</option>')\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'districtList': districtList,\n 'formato': style, 'markers': markers})))\n\n\n@csrf_exempt\ndef museumPage(request, museumID):\n template = get_template('museo.html')\n museum = Museo.objects.get(ID_ENTIDAD=museumID)\n if request.method == 'POST' and 'comentario' in request.POST:\n comment = Comentario(texto=request.POST['comentario'], museo=museum,\n usuario=request.user.username)\n comment.save()\n elif request.method == 'POST' and 'añadir' in request.POST:\n fav = Favorito(museo=museum, usuario=request.user)\n fav.save()\n elif request.method == 'POST' and 'quitar' in request.POST:\n Favorito.objects.filter(museo=museum, usuario=request.user).delete()\n elif request.method == 'POST' and 'mas' in request.POST:\n like = Like(museo=museum, usuario=request.user)\n like.save()\n elif request.method == 'POST' and 'menos' in request.POST:\n Like.objects.filter(museo=museum, usuario=request.user).delete()\n comments = museum.comentario_set.all()\n message 
= (\"<center><b><a class='titulos_museo'>\" + museum.NOMBRE +\n \"</a></b></center><div id='scroll'></br><center><b><a class='titulos_museo'>Descripción</a></b></center></br><center><a class='texto_museo'>\"\n + museum.DESCRIPCION_ENTIDAD +\n \"</a></center></br><center><b><a class='titulos_museo'>Horario</a></b></center></br><center><a class='texto_museo'>\"\n + museum.HORARIO +\n \"</a></center></br><center><b><a class='titulos_museo'>Accesibilidad</a></b></center></br><center><a class='texto_museo'>\"\n + museum.ACCESIBILIDAD +\n \"</a></center></br><center><b><a class='titulos_museo'>Dirección</a></b></center></br><center><a class='texto_museo'>\"\n + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.\n NUM + ', ' + museum.LOCALIDAD +\n \"</a><center></br><center><a class='texto_museo'>Barrio: \" + museum\n .BARRIO +\n \"</a></center></br><center><a class='texto_museo'>Distrito: \" + str\n (museum.DISTRITO) +\n \"</a></center></br><center><b><a class='titulos_museo'>Datos de contacto</a></b></center></br><center><a class='texto_museo'>Teléfono: \"\n + museum.TELEFONO +\n \"</a></center></br><center><a class='texto_museo'>Email: \" + museum\n .EMAIL +\n \"</a></center></br><center><b><a class='titulos_museo'>Comentarios</a></b></center></br>\"\n )\n allComments = ''\n for comment in comments:\n allComments = (allComments + \"<center><a class='texto_museo'><b>\" +\n 'Anónimo</b>: ' + comment.texto + ', ' + (datetime.timedelta(\n hours=2) + comment.fecha).strftime('%H:%M:%S %d-%m-%Y') +\n '</a></center></br>')\n message = message + allComments\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n favorito = Favorito.objects.get(museo=museum, usuario=request.user)\n favoriteButton = (\"<center><form action='/museos/\" + museumID +\n \"/' method='post'><input type='hidden' name='quitar' value='fav'>\"\n +\n \"<input class='desplegable' type='submit' value='Quitar de favoritos'></form></center>\"\n )\n except Favorito.DoesNotExist:\n 
favoriteButton = (\"<center><form action='/museos/\" + museumID +\n \"/' method='post'><input type='hidden' name='añadir' value='fav'>\"\n +\n \"<input class='desplegable' type='submit' value='Añadir a favoritos'></form></center>\"\n )\n try:\n like = Like.objects.get(museo=museum, usuario=request.user)\n likeButton = (\"<center><form action='/museos/\" + museumID +\n \"/' method='post'><input type='hidden' name='menos' value='like'>\"\n +\n \"<input class='desplegable' type='submit' value='Dislike'></form></center>\"\n )\n except Like.DoesNotExist:\n likeButton = (\"<center><form action='/museos/\" + museumID +\n \"/' method='post'><input type='hidden' name='mas' value='like'>\"\n +\n \"<input class='desplegable' type='submit' value='Like'></form></center>\"\n )\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n favoriteButton = ''\n likeButton = ''\n if (museum.LATITUD != 'No disponbile' and museum.LONGITUD !=\n 'No disponible'):\n marker = ('var ' + 'X' + museum.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museum.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museum.ID_ENTIDAD +\n 'marker = new google.maps.Marker({' + 'position: {lat: ' +\n museum.LATITUD + ', lng: ' + museum.LONGITUD + ' },map: map});' +\n 'X' + museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museum.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museum.ID_ENTIDAD +\n 'marker);' + '});')\n else:\n marker = ''\n return HttpResponse(template.render(Context({'body': message, 'login':\n login, 'user': request.user, 'id': museumID, 'fav': favoriteButton,\n 'like': likeButton, 'formato': style, 
'marker': marker})))\n\n\n@csrf_exempt\ndef loginPage(request):\n if request.method == 'POST':\n if not request.user.is_authenticated() and 'login' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n elif not request.user.is_authenticated(\n ) and 'registro' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n try:\n user = User.objects.get(username=username)\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n except User.DoesNotExist:\n user = User.objects.create_user(username=username, password\n =password)\n user.save()\n request.method = 'GET'\n return mainPage(request)\n\n\ndef logoutPage(request):\n logout(request)\n return mainPage(request)\n\n\ndef userPage(request, user, number):\n if number == None:\n number = 1\n template = get_template('personal.html')\n listTotal = ''\n favoritos = Favorito.objects.filter(usuario=user)\n group = range(5)\n count = 0\n markers = ''\n for favorito in favoritos:\n count = count + 1\n museum = Museo.objects.get(NOMBRE=favorito.museo)\n listTotal = (listTotal + \"<a class='titulos' href=\" + museum.\n CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.\n comentario_set.count()) + ' Comentarios - ' + str(museum.\n like_set.count()) + ' Likes</b></br></br>')\n listTotal = (listTotal + \"<a class='direccion'>\" + museum.\n CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM +\n ', ' + museum.LOCALIDAD + '</a></br></br>')\n listTotal = (listTotal + \"<a class='info' href=\" + '/museos/' +\n museum.ID_ENTIDAD +\n '/>Más información</a> <b>Fecha de guardado:' + (datetime.\n timedelta(hours=2) + favorito.fecha).strftime(\n '%H:%M:%S %d-%m-%Y') + '</b></br></br></br>')\n if (museum.LATITUD != 'No disponible' and museum.LONGITUD !=\n 'No disponible'):\n markers = 
(markers + 'var ' + 'X' + museum.ID_ENTIDAD +\n 'info = new google.maps.InfoWindow({' + \"content:'<h1>\" +\n museum.NOMBRE + \"</h1>'});\" + 'var ' + 'X' + museum.\n ID_ENTIDAD + 'marker = new google.maps.Marker({' +\n 'position: {lat: ' + museum.LATITUD + ', lng: ' + museum.\n LONGITUD + ' },map: map});' + 'X' + museum.ID_ENTIDAD +\n \"marker.addListener('click', function() {\" + 'X' + museum.\n ID_ENTIDAD + 'info.open(map,' + 'X' + museum.ID_ENTIDAD +\n 'marker);' + '});')\n if count % 5 == 0:\n listTotal = listTotal + ';'\n group = listTotal.split(';')[int(number) - 1]\n list = ''\n if favoritos.count() % 5 == 0:\n pages = int(favoritos.count() / 5)\n else:\n pages = int(favoritos.count() / 5) + 1\n pagesRange = range(pages)\n if pages > 1:\n list = '<br>'\n if int(number) > 1:\n list = (list + \"<center><div class='pagination'><a href='/\" +\n user + '/' + str(int(number) - 1) + \"'>«</a>\")\n else:\n list = (list + \"<center><div class='pagination'><a href='/\" +\n user + '/' + str(number) + \"'>«</a>\")\n for page in pagesRange:\n if page == int(number) - 1:\n list = list + \"<a class='active' href='/\" + user + '/' + str(\n page + 1) + \"'>\" + str(page + 1) + '</a>'\n else:\n list = list + \"<a href='/\" + user + '/' + str(page + 1\n ) + \"'>\" + str(page + 1) + '</a>'\n if int(number) == pages:\n list = list + \"<a href='/\" + user + '/' + str(number\n ) + \"'>»</a></div></center></br>\"\n else:\n list = list + \"<a href='/\" + user + '/' + str(int(number) + 1\n ) + \"'>»</a></div></center></br>\"\n list = list + \"<div id='scroll'><center>\"\n for item in group:\n list = list + item\n if (list == '' or list == \"<div id='scroll'><center>\"\n ) and user != 'AnonymousUser':\n list = (\"<center><a class='titulos'>\" +\n 'Para que aparezcan museos en esta página, ' + user +\n ' tiene que añadirlos.' 
+ '</a></center></br></br>')\n elif (list == '' or list == \"<div id='scroll'><center>\"\n ) and user == 'AnonymousUser':\n list = (\"<center><a class='titulos'>\" +\n 'Para ver tu página personal, primero tienes que loguearte.' +\n '</a></center></br></br>')\n else:\n list = (list + \"<center><a class='info' href='/\" + user +\n \"/xml'>XML del usuario</a></center>\")\n list = list + '</center></div>'\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario=user.username)\n userList = (userList + \"<li><a href='/\" + user.username + \"'>\" +\n title.titulo + ' - ' + user.username + '</a></li></br>')\n except Titulo.DoesNotExist:\n userList = (userList + \"<li><a href='/\" + user.username +\n \"'>Página de \" + user.username + '</a></li></br>')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'body': list, 'login':\n login, 'user': request.user, 'userList': userList, 'formato': style,\n 'markers': markers})))\n\n\ndef userXMLPage(request, user):\n template = get_template('personalXML.xml')\n favoriteList = []\n favoriteMuseums = Favorito.objects.filter(usuario=user)\n for favorite in favoriteMuseums:\n favoriteList = favoriteList + [favorite.museo]\n return HttpResponse(template.render(Context({'favoriteList':\n favoriteList, 'user': user})), content_type='text/xml')\n\n\ndef XMLPage(request):\n template = get_template('personalXML.xml')\n user = ''\n topList = []\n topMuseums = getRanking()\n topFive = range(5)\n for item in topFive:\n if 
topMuseums[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=topMuseums[item][0])\n topList = topList + [museum]\n return HttpResponse(template.render(Context({'favoriteList': topList,\n 'user': user})), content_type='text/xml')\n\n\ndef XMLAccesiblePage(request):\n template = get_template('personalXML.xml')\n user = ''\n topList = []\n topMuseums = getAccessibleRanking()\n topFive = range(5)\n for item in topFive:\n if topMuseums[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD=topMuseums[item][0])\n topList = topList + [museum]\n return HttpResponse(template.render(Context({'favoriteList': topList,\n 'user': user})), content_type='text/xml')\n\n\n@csrf_exempt\ndef preferencesPage(request, user):\n template = get_template('preferencias.html')\n if request.method == 'POST':\n if 'color' in request.POST:\n try:\n color = Color.objects.get(usuario=user)\n color.color = request.POST['color']\n except Color.DoesNotExist:\n color = Color(usuario=user, color=request.POST['color'])\n color.save()\n elif 'tamaño' in request.POST:\n try:\n size = Letra.objects.get(usuario=user)\n size.letra = request.POST['tamaño']\n except Letra.DoesNotExist:\n size = Letra(usuario=user, letra=request.POST['tamaño'])\n size.save()\n elif 'título' in request.POST:\n try:\n title = Titulo.objects.get(usuario=user)\n title.titulo = request.POST['título']\n except Titulo.DoesNotExist:\n title = Titulo(usuario=user, titulo=request.POST['título'])\n title.save()\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': 
login, 'user':\n user, 'formato': style})))\n\n\ndef aboutPage(request):\n template = get_template('about.html')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario=request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario=request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\n \"body{font-family: 'Helvetica', sans-serif;color: #444444;font-size: \"\n + letra + 'pt;background-color: #' + color + ';}')\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': login, 'user':\n request.user, 'formato': style})))\n\n\ndef updateDB(request):\n museos = parseXML('web/museos.xml')\n for museo in museos:\n try:\n distrito = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except Distrito.DoesNotExist:\n distrito = Distrito(nombre=museos[museo]['DISTRITO'])\n distrito.save()\n for museo in museos:\n try:\n A = museos[museo]['ID-ENTIDAD']\n except KeyError:\n A = 'No disponible'\n try:\n B = museos[museo]['NOMBRE']\n except KeyError:\n B = 'No disponible'\n try:\n C = museos[museo]['DESCRIPCION-ENTIDAD']\n except KeyError:\n C = 'No disponible'\n try:\n D = museos[museo]['HORARIO']\n except KeyError:\n D = 'No disponible'\n try:\n E = museos[museo]['TRANSPORTE']\n except KeyError:\n E = 'No disponible'\n try:\n F = museos[museo]['ACCESIBILIDAD']\n except KeyError:\n F = 'No disponible'\n try:\n G = museos[museo]['CONTENT-URL']\n except KeyError:\n G = 'No disponible'\n try:\n H = museos[museo]['NOMBRE-VIA']\n except KeyError:\n H = 'No disponible'\n try:\n I = museos[museo]['CLASE-VIAL']\n except KeyError:\n I = 'No disponible'\n try:\n J = museos[museo]['TIPO-NUM']\n except KeyError:\n J = 'No disponible'\n try:\n K = museos[museo]['NUM']\n except KeyError:\n K = 'No disponible'\n try:\n L = museos[museo]['LOCALIDAD']\n except KeyError:\n L = 'No disponible'\n try:\n M = 
museos[museo]['PROVINCIA']\n except KeyError:\n M = 'No disponible'\n try:\n N = museos[museo]['CODIGO-POSTAL']\n except KeyError:\n N = 'No disponible'\n try:\n Ñ = museos[museo]['BARRIO']\n except KeyError:\n Ñ = 'No disponible'\n try:\n O = Distrito.objects.get(nombre=museos[museo]['DISTRITO'])\n except KeyError:\n O = 'No disponible'\n try:\n P = museos[museo]['COORDENADA-X']\n except KeyError:\n P = 'No disponible'\n try:\n Q = museos[museo]['COORDENADA-Y']\n except KeyError:\n Q = 'No disponible'\n try:\n R = museos[museo]['LATITUD']\n except KeyError:\n R = 'No disponible'\n try:\n S = museos[museo]['LONGITUD']\n except KeyError:\n S = 'No disponible'\n try:\n T = museos[museo]['TELEFONO']\n except KeyError:\n T = 'No disponible'\n try:\n U = museos[museo]['FAX']\n except KeyError:\n U = 'No disponible'\n try:\n V = museos[museo]['EMAIL']\n except KeyError:\n V = 'No disponible'\n try:\n W = museos[museo]['TIPO']\n except KeyError:\n W = 'No disponible'\n try:\n viejoMuseo = Museo.objects.get(ID_ENTIDAD=A)\n except Museo.DoesNotExist:\n nuevoMuseo = Museo(ID_ENTIDAD=A, NOMBRE=B, DESCRIPCION_ENTIDAD=\n C, HORARIO=D, TRANSPORTE=E, ACCESIBILIDAD=F, CONTENT_URL=G,\n NOMBRE_VIA=H, CLASE_VIAL=I, TIPO_NUM=J, NUM=K, LOCALIDAD=L,\n PROVINCIA=M, CODIGO_POSTAL=N, BARRIO=Ñ, DISTRITO=O,\n COORDENADA_X=P, COORDENADA_Y=Q, LATITUD=R, LONGITUD=S,\n TELEFONO=T, FAX=U, EMAIL=V, TIPO=W)\n nuevoMuseo.save()\n return mainPage(request)\n",
"step-5": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom django.contrib.auth.models import User\nfrom .models import Museo, Distrito, Comentario, Favorito, Like, Titulo, Letra, Color\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.contrib.auth import authenticate, login\nfrom django.contrib.auth import logout\nfrom web.parser import parseXML\nimport operator\nfrom django.template.loader import get_template\nfrom django.template import Context\nimport datetime\n\ndef getMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\ndef getAccessibleMuseums():\n museos = Museo.objects.all()\n allMuseums = {}\n for museo in museos:\n if museo.ACCESIBILIDAD == '1':\n allMuseums[museo.ID_ENTIDAD] = museo.comentario_set.count()\n return allMuseums\n\ndef getRanking():\n allMuseums = getMuseums()\n ranking = sorted(allMuseums.items(), key = operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\ndef getAccessibleRanking():\n allMuseums = getAccessibleMuseums()\n ranking = sorted(allMuseums.items(), key = operator.itemgetter(1))\n ranking.reverse()\n return ranking\n\n@csrf_exempt\ndef mainPage(request):\n template = get_template('index.html')\n topFive = range(5)\n list = '<br>'\n markers = ''\n if request.method == 'GET' or (request.method == 'POST' and request.POST['accion'] == 'mostrar'):\n ranking = getRanking()\n list = (list + \"<center><form action='/' method='post'><input type='hidden' name='accion' value='ocultar'>\" +\n \"<input class='desplegable' type='submit' value='Mostrar museos accesibles'></form></center><div id='scroll'>\")\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD = ranking[item][0])\n list = list + \"<center><a class='titulos' href=\" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + 
str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'\n list = list + \"<a class='direccion'>\" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n list = list + \"<a class='info' href=\" + \"/museos/\" + museum.ID_ENTIDAD + '/>Más información</a></center></br></br>'\n if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':\n markers = (markers +\n \"var \" + \"X\" + museum.ID_ENTIDAD + \"info = new google.maps.InfoWindow({\" +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n \"var \" + \"X\" + museum.ID_ENTIDAD + \"marker = new google.maps.Marker({\" +\n \"position: {lat: \" + museum.LATITUD + \", lng: \" + museum.LONGITUD + \" },map: map});\" +\n \"X\" + museum.ID_ENTIDAD + \"marker.addListener('click', function() {\" +\n \"X\" + museum.ID_ENTIDAD + \"info.open(map,\" + \"X\" + museum.ID_ENTIDAD + \"marker);\" +\n \"});\")\n if ranking[0][1] == 0:\n list = list + \"<a class='titulos'><center>\" + 'No hay museos con comentarios, ¡sé el primero en comentar!' + '</center></a></br></br></div>'\n else:\n list = list + '</div>'\n list = list + \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n else:\n list = list + \"<a class='titulos'><center>\" + 'No hay museos con comentarios, ¡sé el primero en comentar!' 
+ '</center></a></br></br></div>'\n elif request.method == 'POST' and request.POST['accion'] == 'ocultar':\n ranking = getAccessibleRanking()\n list = (list + \"<center><form action='/' method='post'><input type='hidden' name='accion' value='mostrar'>\" +\n \"<input class='desplegable' type='submit' value='Mostrar todos los museos'></form></center><div id='scroll'>\")\n if len(ranking) > 0:\n for item in topFive:\n if ranking[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD = ranking[item][0])\n list = list + \"<center><a class='titulos' href=\" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'\n list = list + \"<a class='direccion'>\" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n list = list + \"<a class='info' href=\" + \"/museos/\" + museum.ID_ENTIDAD + '/>Más información</a></center></br></br>'\n if museum.LATITUD != 'No disponbile' and museum.LONGITUD != 'No disponible':\n markers = (markers +\n \"var \" + \"X\" + museum.ID_ENTIDAD + \"info = new google.maps.InfoWindow({\" +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n \"var \" + \"X\" + museum.ID_ENTIDAD + \"marker = new google.maps.Marker({\" +\n \"position: {lat: \" + museum.LATITUD + \", lng: \" + museum.LONGITUD + \" },map: map});\" +\n \"X\" + museum.ID_ENTIDAD + \"marker.addListener('click', function() {\" +\n \"X\" + museum.ID_ENTIDAD + \"info.open(map,\" + \"X\" + museum.ID_ENTIDAD + \"marker);\" +\n \"});\")\n if ranking[0][1] == 0:\n list = list + \"<a class='titulos'><center>\" + 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!' 
+ '</center></a></br></br></div>'\n else:\n list = list + '</div>'\n list = list + \"<center><a class='info' href='/xml'>XML de la página</a></center>\"\n else:\n list = list + \"<a class='titulos'><center>\" + 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!' + '</center></a></br></br></div>'\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario = request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario = request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\"body{font-family: 'Helvetica', sans-serif;\"\n \"color: #444444;\"\n \"font-size: \" + letra + \"pt;\"\n \"background-color: #\" + color + \";}\")\n else:\n login = 0\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario = user.username)\n userList = userList + \"<li><a href='/\" + user.username + \"'>\" + title.titulo + ' - ' + user.username + \"</a></li></br>\"\n except Titulo.DoesNotExist:\n userList = userList + \"<li><a href='/\" + user.username + \"'>Página de \" + user.username + \"</a></li></br>\"\n return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'userList': userList, 'formato': style, 'markers': markers})))\n\n@csrf_exempt\ndef museumsPage(request):\n template = get_template('museos.html')\n if request.method == 'GET':\n museos = Museo.objects.all()\n elif request.method == 'POST':\n distrito = Distrito.objects.get(nombre = request.POST['distrito'])\n museos = distrito.museo_set.all()\n list = ''\n markers = ''\n i = 1\n for museo in museos:\n list = list + \"<center><a class='titulos'>\" + museo.NOMBRE + '</a></br>'\n list = list + \"<a class='info' href=\" + \"/museos/\" + museo.ID_ENTIDAD + '/>Más información</a></center></br></br>'\n if museo.LATITUD != 'No disponible' and museo.LONGITUD != 'No disponible':\n 
markers = (markers +\n \"var \" + \"X\" + museo.ID_ENTIDAD + \"info = new google.maps.InfoWindow({\" +\n \"content:'<h1>\" + museo.NOMBRE + \"</h1>'});\" +\n \"var \" + \"X\" + museo.ID_ENTIDAD + \"marker = new google.maps.Marker({\" +\n \"position: {lat: \" + museo.LATITUD + \", lng: \" + museo.LONGITUD + \" },map: map});\" +\n \"X\" + museo.ID_ENTIDAD + \"marker.addListener('click', function() {\" +\n \"X\" + museo.ID_ENTIDAD + \"info.open(map,\" + \"X\" + museo.ID_ENTIDAD + \"marker);\" +\n \"});\")\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario = request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario = request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\"body{font-family: 'Helvetica', sans-serif;\"\n \"color: #444444;\"\n \"font-size: \" + letra + \"pt;\"\n \"background-color: #\" + color + \";}\")\n else:\n login = 0\n distritos = Distrito.objects.all()\n districtList = ''\n for distrito in distritos:\n districtList = districtList + \"<option value='\" + distrito.nombre + \"'>\" + distrito.nombre + \"</option>\"\n return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'districtList': districtList, 'formato': style, 'markers': markers})))\n\n@csrf_exempt\ndef museumPage(request, museumID):\n template = get_template('museo.html')\n museum = Museo.objects.get(ID_ENTIDAD = museumID)\n if request.method == 'POST' and 'comentario' in request.POST:\n comment = Comentario(texto = request.POST['comentario'], museo = museum, usuario = request.user.username)\n comment.save()\n elif request.method == 'POST' and 'añadir' in request.POST:\n fav = Favorito(museo = museum, usuario = request.user)\n fav.save()\n elif request.method == 'POST' and 'quitar' in request.POST:\n Favorito.objects.filter(museo = museum, usuario = request.user).delete()\n elif request.method == 
'POST' and 'mas' in request.POST:\n like = Like(museo = museum, usuario = request.user)\n like.save()\n elif request.method == 'POST' and 'menos' in request.POST:\n Like.objects.filter(museo = museum, usuario = request.user).delete()\n comments = museum.comentario_set.all()\n message = (\"<center><b><a class='titulos_museo'>\" + museum.NOMBRE + \"</a></b></center><div id='scroll'></br>\"\n \"<center><b><a class='titulos_museo'>Descripción</a></b></center></br>\"\n \"<center><a class='texto_museo'>\" + museum.DESCRIPCION_ENTIDAD + '</a></center></br>'\n \"<center><b><a class='titulos_museo'>Horario</a></b></center></br>\"\n \"<center><a class='texto_museo'>\" + museum.HORARIO + '</a></center></br>'\n \"<center><b><a class='titulos_museo'>Accesibilidad</a></b></center></br>\"\n \"<center><a class='texto_museo'>\" + museum.ACCESIBILIDAD + '</a></center></br>'\n \"<center><b><a class='titulos_museo'>Dirección</a></b></center></br>\"\n \"<center><a class='texto_museo'>\" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a><center></br>'\n \"<center><a class='texto_museo'>Barrio: \" + museum.BARRIO + '</a></center></br>'\n \"<center><a class='texto_museo'>Distrito: \" + str(museum.DISTRITO) + '</a></center></br>'\n \"<center><b><a class='titulos_museo'>Datos de contacto</a></b></center></br>\"\n \"<center><a class='texto_museo'>Teléfono: \" + museum.TELEFONO + '</a></center></br>'\n \"<center><a class='texto_museo'>Email: \" + museum.EMAIL + '</a></center></br>'\n \"<center><b><a class='titulos_museo'>Comentarios</a></b></center></br>\")\n allComments = ''\n for comment in comments:\n allComments = allComments + \"<center><a class='texto_museo'><b>\" + 'Anónimo</b>: ' + comment.texto + ', ' + (datetime.timedelta(hours=2) + comment.fecha).strftime(\"%H:%M:%S %d-%m-%Y\") + '</a></center></br>'\n message = message + allComments\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n favorito = 
Favorito.objects.get(museo = museum, usuario = request.user)\n favoriteButton = (\"<center><form action='/museos/\" + museumID + \"/' method='post'><input type='hidden' name='quitar' value='fav'>\" +\n \"<input class='desplegable' type='submit' value='Quitar de favoritos'></form></center>\")\n except Favorito.DoesNotExist:\n favoriteButton = (\"<center><form action='/museos/\" + museumID + \"/' method='post'><input type='hidden' name='añadir' value='fav'>\" +\n \"<input class='desplegable' type='submit' value='Añadir a favoritos'></form></center>\")\n try:\n like = Like.objects.get(museo = museum, usuario = request.user)\n likeButton = (\"<center><form action='/museos/\" + museumID + \"/' method='post'><input type='hidden' name='menos' value='like'>\" +\n \"<input class='desplegable' type='submit' value='Dislike'></form></center>\")\n except Like.DoesNotExist:\n likeButton = (\"<center><form action='/museos/\" + museumID + \"/' method='post'><input type='hidden' name='mas' value='like'>\" +\n \"<input class='desplegable' type='submit' value='Like'></form></center>\")\n try:\n color = Color.objects.get(usuario = request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario = request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\"body{font-family: 'Helvetica', sans-serif;\"\n \"color: #444444;\"\n \"font-size: \" + letra + \"pt;\"\n \"background-color: #\" + color + \";}\")\n else:\n login = 0\n favoriteButton = ''\n likeButton = ''\n if museum.LATITUD != 'No disponbile' and museum.LONGITUD != 'No disponible':\n marker = (\"var \" + \"X\" + museum.ID_ENTIDAD + \"info = new google.maps.InfoWindow({\" +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n \"var \" + \"X\" + museum.ID_ENTIDAD + \"marker = new google.maps.Marker({\" +\n \"position: {lat: \" + museum.LATITUD + \", lng: \" + museum.LONGITUD + \" },map: map});\" +\n \"X\" + museum.ID_ENTIDAD + 
\"marker.addListener('click', function() {\" +\n \"X\" + museum.ID_ENTIDAD + \"info.open(map,\" + \"X\" + museum.ID_ENTIDAD + \"marker);\" +\n \"});\")\n else:\n marker = ''\n return HttpResponse(template.render(Context({'body': message, 'login': login, 'user': request.user, 'id': museumID, 'fav': favoriteButton, 'like': likeButton, 'formato': style, 'marker': marker})))\n\n@csrf_exempt\ndef loginPage(request):\n if request.method == 'POST':\n if not request.user.is_authenticated() and 'login' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n elif not request.user.is_authenticated() and 'registro' in request.POST:\n username = request.POST['Usuario']\n password = request.POST['Contraseña']\n try:\n user = User.objects.get(username = username)\n user = authenticate(username = username, password = password)\n if user is not None:\n login(request, user)\n except User.DoesNotExist:\n user = User.objects.create_user(username = username, password = password)\n user.save()\n request.method = 'GET'\n return mainPage(request)\n\ndef logoutPage(request):\n logout(request)\n return mainPage(request)\n\ndef userPage(request, user, number):\n if number == None:\n number = 1\n template = get_template('personal.html')\n listTotal = ''\n favoritos = Favorito.objects.filter(usuario = user)\n group = range(5)\n count = 0;\n markers = ''\n for favorito in favoritos:\n count = count + 1;\n museum = Museo.objects.get(NOMBRE = favorito.museo)\n listTotal = listTotal + \"<a class='titulos' href=\" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'\n listTotal = listTotal + \"<a class='direccion'>\" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'\n 
listTotal = listTotal + \"<a class='info' href=\" + \"/museos/\" + museum.ID_ENTIDAD + '/>Más información</a> <b>Fecha de guardado:' + (datetime.timedelta(hours=2) + favorito.fecha).strftime(\"%H:%M:%S %d-%m-%Y\") + '</b></br></br></br>'\n if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':\n markers = (markers +\n \"var \" + \"X\" + museum.ID_ENTIDAD + \"info = new google.maps.InfoWindow({\" +\n \"content:'<h1>\" + museum.NOMBRE + \"</h1>'});\" +\n \"var \" + \"X\" + museum.ID_ENTIDAD + \"marker = new google.maps.Marker({\" +\n \"position: {lat: \" + museum.LATITUD + \", lng: \" + museum.LONGITUD + \" },map: map});\" +\n \"X\" + museum.ID_ENTIDAD + \"marker.addListener('click', function() {\" +\n \"X\" + museum.ID_ENTIDAD + \"info.open(map,\" + \"X\" + museum.ID_ENTIDAD + \"marker);\" +\n \"});\")\n if (count % 5) == 0:\n listTotal = listTotal + ';'\n group = listTotal.split(';')[int(number) - 1]\n list = ''\n if (favoritos.count() % 5) == 0:\n pages = int(favoritos.count() / 5)\n else:\n pages = int(favoritos.count() / 5) + 1\n pagesRange = range(pages)\n if pages > 1:\n list = '<br>'\n if int(number) > 1:\n list = list + \"<center><div class='pagination'><a href='/\" + user + \"/\" + str(int(number) - 1) + \"'>«</a>\"\n else:\n list = list + \"<center><div class='pagination'><a href='/\" + user + \"/\" + str(number) + \"'>«</a>\"\n for page in pagesRange:\n if page == (int(number) - 1):\n list = list + \"<a class='active' href='/\" + user + \"/\" + str(page + 1) + \"'>\" + str(page + 1) + \"</a>\"\n else:\n list = list + \"<a href='/\" + user + \"/\" + str(page + 1) + \"'>\" + str(page + 1) + \"</a>\"\n if int(number) == pages:\n list = list + \"<a href='/\" + user + \"/\" + str(number) + \"'>»</a></div></center></br>\"\n else:\n list = list + \"<a href='/\" + user + \"/\" + str(int(number) + 1) + \"'>»</a></div></center></br>\"\n list = list + \"<div id='scroll'><center>\"\n for item in group:\n list = list + item\n if (list == '' or 
list == \"<div id='scroll'><center>\") and user != 'AnonymousUser':\n list = \"<center><a class='titulos'>\" + 'Para que aparezcan museos en esta página, ' + user + ' tiene que añadirlos.' + '</a></center></br></br>'\n elif (list == '' or list == \"<div id='scroll'><center>\") and user == 'AnonymousUser':\n list = \"<center><a class='titulos'>\" + 'Para ver tu página personal, primero tienes que loguearte.' + '</a></center></br></br>'\n else:\n list = list + \"<center><a class='info' href='/\" + user + \"/xml'>XML del usuario</a></center>\"\n list = list + '</center></div>'\n users = User.objects.all()\n userList = ''\n for user in users:\n try:\n title = Titulo.objects.get(usuario = user.username)\n userList = userList + \"<li><a href='/\" + user.username + \"'>\" + title.titulo + ' - ' + user.username + \"</a></li></br>\"\n except Titulo.DoesNotExist:\n userList = userList + \"<li><a href='/\" + user.username + \"'>Página de \" + user.username + \"</a></li></br>\"\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario = request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario = request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\"body{font-family: 'Helvetica', sans-serif;\"\n \"color: #444444;\"\n \"font-size: \" + letra + \"pt;\"\n \"background-color: #\" + color + \";}\")\n else:\n login = 0\n return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'userList': userList, 'formato': style, 'markers': markers})))\n\ndef userXMLPage(request, user):\n template = get_template(\"personalXML.xml\")\n favoriteList = []\n favoriteMuseums = Favorito.objects.filter(usuario = user)\n for favorite in favoriteMuseums:\n favoriteList = favoriteList + [favorite.museo]\n return HttpResponse(template.render(Context({'favoriteList': favoriteList, 'user': user})), content_type = 
\"text/xml\")\n\ndef XMLPage(request):\n template = get_template(\"personalXML.xml\")\n user = ''\n topList = []\n topMuseums = getRanking()\n topFive = range(5)\n for item in topFive:\n if topMuseums[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD = topMuseums[item][0])\n topList = topList + [museum]\n return HttpResponse(template.render(Context({'favoriteList': topList, 'user': user})), content_type = \"text/xml\")\n\ndef XMLAccesiblePage(request):\n template = get_template(\"personalXML.xml\")\n user = ''\n topList = []\n topMuseums = getAccessibleRanking()\n topFive = range(5)\n for item in topFive:\n if topMuseums[item][1] != 0:\n museum = Museo.objects.get(ID_ENTIDAD = topMuseums[item][0])\n topList = topList + [museum]\n return HttpResponse(template.render(Context({'favoriteList': topList, 'user': user})), content_type = \"text/xml\")\n\n\n@csrf_exempt\ndef preferencesPage(request, user):\n template = get_template(\"preferencias.html\")\n if request.method == 'POST':\n if 'color' in request.POST:\n try:\n color = Color.objects.get(usuario = user)\n color.color = request.POST['color']\n except Color.DoesNotExist:\n color = Color(usuario = user, color = request.POST['color'])\n color.save()\n elif 'tamaño' in request.POST:\n try:\n size = Letra.objects.get(usuario = user)\n size.letra = request.POST['tamaño']\n except Letra.DoesNotExist:\n size = Letra(usuario = user, letra = request.POST['tamaño'])\n size.save()\n elif 'título' in request.POST:\n try:\n title = Titulo.objects.get(usuario = user)\n title.titulo = request.POST['título']\n except Titulo.DoesNotExist:\n title = Titulo(usuario = user, titulo = request.POST['título'])\n title.save()\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario = request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario = request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = 
'9'\n style = (\"body{font-family: 'Helvetica', sans-serif;\"\n \"color: #444444;\"\n \"font-size: \" + letra + \"pt;\"\n \"background-color: #\" + color + \";}\")\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': login, 'user': user, 'formato': style})))\n\ndef aboutPage(request):\n template = get_template('about.html')\n style = ''\n if request.user.is_authenticated():\n login = 1\n try:\n color = Color.objects.get(usuario = request.user)\n color = color.color\n except Color.DoesNotExist:\n color = 'EEF4F8'\n try:\n letra = Letra.objects.get(usuario = request.user)\n letra = letra.letra\n except Letra.DoesNotExist:\n letra = '9'\n style = (\"body{font-family: 'Helvetica', sans-serif;\"\n \"color: #444444;\"\n \"font-size: \" + letra + \"pt;\"\n \"background-color: #\" + color + \";}\")\n else:\n login = 0\n return HttpResponse(template.render(Context({'login': login, 'user': request.user, 'formato': style})))\n\ndef updateDB(request):\n #Museo.objects.all().delete()\n museos = parseXML('web/museos.xml')\n for museo in museos:\n try:\n distrito = Distrito.objects.get(nombre = museos[museo]['DISTRITO'])\n except Distrito.DoesNotExist:\n distrito = Distrito(nombre = museos[museo]['DISTRITO'])\n distrito.save()\n for museo in museos:\n try:\n A = museos[museo]['ID-ENTIDAD']\n except KeyError:\n A = 'No disponible'\n try:\n B = museos[museo]['NOMBRE']\n except KeyError:\n B = 'No disponible'\n try:\n C = museos[museo]['DESCRIPCION-ENTIDAD']\n except KeyError:\n C = 'No disponible'\n try:\n D = museos[museo]['HORARIO']\n except KeyError:\n D = 'No disponible'\n try:\n E = museos[museo]['TRANSPORTE']\n except KeyError:\n E = 'No disponible'\n try:\n F = museos[museo]['ACCESIBILIDAD']\n except KeyError:\n F = 'No disponible'\n try:\n G = museos[museo]['CONTENT-URL']\n except KeyError:\n G = 'No disponible'\n try:\n H = museos[museo]['NOMBRE-VIA']\n except KeyError:\n H = 'No disponible'\n try:\n I = museos[museo]['CLASE-VIAL']\n except 
KeyError:\n I = 'No disponible'\n try:\n J = museos[museo]['TIPO-NUM']\n except KeyError:\n J = 'No disponible'\n try:\n K = museos[museo]['NUM']\n except KeyError:\n K = 'No disponible'\n try:\n L = museos[museo]['LOCALIDAD']\n except KeyError:\n L = 'No disponible'\n try:\n M = museos[museo]['PROVINCIA']\n except KeyError:\n M = 'No disponible'\n try:\n N = museos[museo]['CODIGO-POSTAL']\n except KeyError:\n N = 'No disponible'\n try:\n Ñ = museos[museo]['BARRIO']\n except KeyError:\n Ñ = 'No disponible'\n try:\n O = Distrito.objects.get(nombre = museos[museo]['DISTRITO'])\n except KeyError:\n O = 'No disponible'\n try:\n P = museos[museo]['COORDENADA-X']\n except KeyError:\n P = 'No disponible'\n try:\n Q = museos[museo]['COORDENADA-Y']\n except KeyError:\n Q = 'No disponible'\n try:\n R = museos[museo]['LATITUD']\n except KeyError:\n R = 'No disponible'\n try:\n S = museos[museo]['LONGITUD']\n except KeyError:\n S = 'No disponible'\n try:\n T = museos[museo]['TELEFONO']\n except KeyError:\n T = 'No disponible'\n try:\n U = museos[museo]['FAX']\n except KeyError:\n U = 'No disponible'\n try:\n V = museos[museo]['EMAIL']\n except KeyError:\n V = 'No disponible'\n try:\n W = museos[museo]['TIPO']\n except KeyError:\n W = 'No disponible'\n try:\n viejoMuseo = Museo.objects.get(ID_ENTIDAD = A)\n except Museo.DoesNotExist:\n nuevoMuseo = Museo(\n ID_ENTIDAD = A,\n NOMBRE = B,\n DESCRIPCION_ENTIDAD = C,\n HORARIO = D,\n TRANSPORTE = E,\n ACCESIBILIDAD = F,\n CONTENT_URL = G,\n NOMBRE_VIA = H,\n CLASE_VIAL = I,\n TIPO_NUM = J,\n NUM = K,\n LOCALIDAD = L,\n PROVINCIA = M,\n CODIGO_POSTAL = N,\n BARRIO = Ñ,\n DISTRITO = O,\n COORDENADA_X = P,\n COORDENADA_Y = Q,\n LATITUD = R,\n LONGITUD = S,\n TELEFONO = T,\n FAX = U,\n EMAIL = V,\n TIPO = W)\n nuevoMuseo.save()\n return mainPage(request)\n",
"step-ids": [
10,
11,
14,
17,
18
]
}
|
[
10,
11,
14,
17,
18
] |
# Copyright 2010 Google Inc. All Rights Reserved.
#
import copy
import logging
import threading
from automation.common import command as cmd
from automation.common import logger
from automation.common.command_executer import CommandExecuter
from automation.common import job
from automation.common import job_group
from automation.server.job_manager import IdProducerPolicy
class JobGroupManager(object):
def __init__(self, job_manager):
self.all_job_groups = []
self.job_manager = job_manager
self.job_manager.AddListener(self)
self._lock = threading.Lock()
self._job_group_finished = threading.Condition(self._lock)
self._id_producer = IdProducerPolicy()
self._id_producer.Initialize(job_group.JobGroup.HOMEDIR_PREFIX,
'job-group-(?P<id>\d+)')
self._logger = logging.getLogger(self.__class__.__name__)
def GetJobGroup(self, group_id):
with self._lock:
for group in self.all_job_groups:
if group.id == group_id:
return group
return None
def GetAllJobGroups(self):
with self._lock:
return copy.deepcopy(self.all_job_groups)
def AddJobGroup(self, group):
with self._lock:
group.id = self._id_producer.GetNextId()
self._logger.debug('Creating runtime environment for %r.', group)
CommandExecuter().RunCommand(cmd.Chain(
cmd.RmTree(group.home_dir), cmd.MakeDir(group.home_dir)))
with self._lock:
self.all_job_groups.append(group)
for job_ in group.jobs:
self.job_manager.AddJob(job_)
group.status = job_group.STATUS_EXECUTING
self._logger.info('Added %r to queue.', group)
return group.id
def KillJobGroup(self, group):
with self._lock:
self._logger.debug('Killing all jobs that belong to %r.', group)
for job_ in group.jobs:
self.job_manager.KillJob(job_)
self._logger.debug('Waiting for jobs to quit.')
# Lets block until the group is killed so we know it is completed
# when we return.
while group.status not in [job_group.STATUS_SUCCEEDED,
job_group.STATUS_FAILED]:
self._job_group_finished.wait()
def NotifyJobComplete(self, job_):
self._logger.debug('Handling %r completion event.', job_)
group = job_.group
with self._lock:
# We need to perform an action only if the group hasn't already failed.
if group.status != job_group.STATUS_FAILED:
if job_.status == job.STATUS_FAILED:
# We have a failed job, abort the job group
group.status = job_group.STATUS_FAILED
if group.cleanup_on_failure:
for job_ in group.jobs:
# TODO(bjanakiraman): We should probably only kill dependent jobs
# instead of the whole job group.
self.job_manager.KillJob(job_)
self.job_manager.CleanUpJob(job_)
else:
# The job succeeded successfully -- lets check to see if we are done.
assert job_.status == job.STATUS_SUCCEEDED
finished = True
for other_job in group.jobs:
assert other_job.status != job.STATUS_FAILED
if other_job.status != job.STATUS_SUCCEEDED:
finished = False
break
if finished and group.status != job_group.STATUS_SUCCEEDED:
# TODO(kbaclawski): Without check performed above following code
# could be called more than once. This would trigger StateMachine
# crash, because it cannot transition from STATUS_SUCCEEDED to
# STATUS_SUCCEEDED. Need to address that bug in near future.
group.status = job_group.STATUS_SUCCEEDED
if group.cleanup_on_completion:
for job_ in group.jobs:
self.job_manager.CleanUpJob(job_)
self._job_group_finished.notifyAll()
|
normal
|
{
"blob_id": "720ec6c222659a13d4a0f3cf9096b70ce6e2b2b3",
"index": 175,
"step-1": "<mask token>\n\n\nclass JobGroupManager(object):\n <mask token>\n\n def GetJobGroup(self, group_id):\n with self._lock:\n for group in self.all_job_groups:\n if group.id == group_id:\n return group\n return None\n <mask token>\n <mask token>\n <mask token>\n\n def NotifyJobComplete(self, job_):\n self._logger.debug('Handling %r completion event.', job_)\n group = job_.group\n with self._lock:\n if group.status != job_group.STATUS_FAILED:\n if job_.status == job.STATUS_FAILED:\n group.status = job_group.STATUS_FAILED\n if group.cleanup_on_failure:\n for job_ in group.jobs:\n self.job_manager.KillJob(job_)\n self.job_manager.CleanUpJob(job_)\n else:\n assert job_.status == job.STATUS_SUCCEEDED\n finished = True\n for other_job in group.jobs:\n assert other_job.status != job.STATUS_FAILED\n if other_job.status != job.STATUS_SUCCEEDED:\n finished = False\n break\n if finished and group.status != job_group.STATUS_SUCCEEDED:\n group.status = job_group.STATUS_SUCCEEDED\n if group.cleanup_on_completion:\n for job_ in group.jobs:\n self.job_manager.CleanUpJob(job_)\n self._job_group_finished.notifyAll()\n",
"step-2": "<mask token>\n\n\nclass JobGroupManager(object):\n\n def __init__(self, job_manager):\n self.all_job_groups = []\n self.job_manager = job_manager\n self.job_manager.AddListener(self)\n self._lock = threading.Lock()\n self._job_group_finished = threading.Condition(self._lock)\n self._id_producer = IdProducerPolicy()\n self._id_producer.Initialize(job_group.JobGroup.HOMEDIR_PREFIX,\n 'job-group-(?P<id>\\\\d+)')\n self._logger = logging.getLogger(self.__class__.__name__)\n\n def GetJobGroup(self, group_id):\n with self._lock:\n for group in self.all_job_groups:\n if group.id == group_id:\n return group\n return None\n\n def GetAllJobGroups(self):\n with self._lock:\n return copy.deepcopy(self.all_job_groups)\n\n def AddJobGroup(self, group):\n with self._lock:\n group.id = self._id_producer.GetNextId()\n self._logger.debug('Creating runtime environment for %r.', group)\n CommandExecuter().RunCommand(cmd.Chain(cmd.RmTree(group.home_dir),\n cmd.MakeDir(group.home_dir)))\n with self._lock:\n self.all_job_groups.append(group)\n for job_ in group.jobs:\n self.job_manager.AddJob(job_)\n group.status = job_group.STATUS_EXECUTING\n self._logger.info('Added %r to queue.', group)\n return group.id\n <mask token>\n\n def NotifyJobComplete(self, job_):\n self._logger.debug('Handling %r completion event.', job_)\n group = job_.group\n with self._lock:\n if group.status != job_group.STATUS_FAILED:\n if job_.status == job.STATUS_FAILED:\n group.status = job_group.STATUS_FAILED\n if group.cleanup_on_failure:\n for job_ in group.jobs:\n self.job_manager.KillJob(job_)\n self.job_manager.CleanUpJob(job_)\n else:\n assert job_.status == job.STATUS_SUCCEEDED\n finished = True\n for other_job in group.jobs:\n assert other_job.status != job.STATUS_FAILED\n if other_job.status != job.STATUS_SUCCEEDED:\n finished = False\n break\n if finished and group.status != job_group.STATUS_SUCCEEDED:\n group.status = job_group.STATUS_SUCCEEDED\n if group.cleanup_on_completion:\n for job_ in 
group.jobs:\n self.job_manager.CleanUpJob(job_)\n self._job_group_finished.notifyAll()\n",
"step-3": "<mask token>\n\n\nclass JobGroupManager(object):\n\n def __init__(self, job_manager):\n self.all_job_groups = []\n self.job_manager = job_manager\n self.job_manager.AddListener(self)\n self._lock = threading.Lock()\n self._job_group_finished = threading.Condition(self._lock)\n self._id_producer = IdProducerPolicy()\n self._id_producer.Initialize(job_group.JobGroup.HOMEDIR_PREFIX,\n 'job-group-(?P<id>\\\\d+)')\n self._logger = logging.getLogger(self.__class__.__name__)\n\n def GetJobGroup(self, group_id):\n with self._lock:\n for group in self.all_job_groups:\n if group.id == group_id:\n return group\n return None\n\n def GetAllJobGroups(self):\n with self._lock:\n return copy.deepcopy(self.all_job_groups)\n\n def AddJobGroup(self, group):\n with self._lock:\n group.id = self._id_producer.GetNextId()\n self._logger.debug('Creating runtime environment for %r.', group)\n CommandExecuter().RunCommand(cmd.Chain(cmd.RmTree(group.home_dir),\n cmd.MakeDir(group.home_dir)))\n with self._lock:\n self.all_job_groups.append(group)\n for job_ in group.jobs:\n self.job_manager.AddJob(job_)\n group.status = job_group.STATUS_EXECUTING\n self._logger.info('Added %r to queue.', group)\n return group.id\n\n def KillJobGroup(self, group):\n with self._lock:\n self._logger.debug('Killing all jobs that belong to %r.', group)\n for job_ in group.jobs:\n self.job_manager.KillJob(job_)\n self._logger.debug('Waiting for jobs to quit.')\n while group.status not in [job_group.STATUS_SUCCEEDED,\n job_group.STATUS_FAILED]:\n self._job_group_finished.wait()\n\n def NotifyJobComplete(self, job_):\n self._logger.debug('Handling %r completion event.', job_)\n group = job_.group\n with self._lock:\n if group.status != job_group.STATUS_FAILED:\n if job_.status == job.STATUS_FAILED:\n group.status = job_group.STATUS_FAILED\n if group.cleanup_on_failure:\n for job_ in group.jobs:\n self.job_manager.KillJob(job_)\n self.job_manager.CleanUpJob(job_)\n else:\n assert job_.status == 
job.STATUS_SUCCEEDED\n finished = True\n for other_job in group.jobs:\n assert other_job.status != job.STATUS_FAILED\n if other_job.status != job.STATUS_SUCCEEDED:\n finished = False\n break\n if finished and group.status != job_group.STATUS_SUCCEEDED:\n group.status = job_group.STATUS_SUCCEEDED\n if group.cleanup_on_completion:\n for job_ in group.jobs:\n self.job_manager.CleanUpJob(job_)\n self._job_group_finished.notifyAll()\n",
"step-4": "import copy\nimport logging\nimport threading\nfrom automation.common import command as cmd\nfrom automation.common import logger\nfrom automation.common.command_executer import CommandExecuter\nfrom automation.common import job\nfrom automation.common import job_group\nfrom automation.server.job_manager import IdProducerPolicy\n\n\nclass JobGroupManager(object):\n\n def __init__(self, job_manager):\n self.all_job_groups = []\n self.job_manager = job_manager\n self.job_manager.AddListener(self)\n self._lock = threading.Lock()\n self._job_group_finished = threading.Condition(self._lock)\n self._id_producer = IdProducerPolicy()\n self._id_producer.Initialize(job_group.JobGroup.HOMEDIR_PREFIX,\n 'job-group-(?P<id>\\\\d+)')\n self._logger = logging.getLogger(self.__class__.__name__)\n\n def GetJobGroup(self, group_id):\n with self._lock:\n for group in self.all_job_groups:\n if group.id == group_id:\n return group\n return None\n\n def GetAllJobGroups(self):\n with self._lock:\n return copy.deepcopy(self.all_job_groups)\n\n def AddJobGroup(self, group):\n with self._lock:\n group.id = self._id_producer.GetNextId()\n self._logger.debug('Creating runtime environment for %r.', group)\n CommandExecuter().RunCommand(cmd.Chain(cmd.RmTree(group.home_dir),\n cmd.MakeDir(group.home_dir)))\n with self._lock:\n self.all_job_groups.append(group)\n for job_ in group.jobs:\n self.job_manager.AddJob(job_)\n group.status = job_group.STATUS_EXECUTING\n self._logger.info('Added %r to queue.', group)\n return group.id\n\n def KillJobGroup(self, group):\n with self._lock:\n self._logger.debug('Killing all jobs that belong to %r.', group)\n for job_ in group.jobs:\n self.job_manager.KillJob(job_)\n self._logger.debug('Waiting for jobs to quit.')\n while group.status not in [job_group.STATUS_SUCCEEDED,\n job_group.STATUS_FAILED]:\n self._job_group_finished.wait()\n\n def NotifyJobComplete(self, job_):\n self._logger.debug('Handling %r completion event.', job_)\n group = 
job_.group\n with self._lock:\n if group.status != job_group.STATUS_FAILED:\n if job_.status == job.STATUS_FAILED:\n group.status = job_group.STATUS_FAILED\n if group.cleanup_on_failure:\n for job_ in group.jobs:\n self.job_manager.KillJob(job_)\n self.job_manager.CleanUpJob(job_)\n else:\n assert job_.status == job.STATUS_SUCCEEDED\n finished = True\n for other_job in group.jobs:\n assert other_job.status != job.STATUS_FAILED\n if other_job.status != job.STATUS_SUCCEEDED:\n finished = False\n break\n if finished and group.status != job_group.STATUS_SUCCEEDED:\n group.status = job_group.STATUS_SUCCEEDED\n if group.cleanup_on_completion:\n for job_ in group.jobs:\n self.job_manager.CleanUpJob(job_)\n self._job_group_finished.notifyAll()\n",
"step-5": "# Copyright 2010 Google Inc. All Rights Reserved.\n#\n\nimport copy\nimport logging\nimport threading\n\nfrom automation.common import command as cmd\nfrom automation.common import logger\nfrom automation.common.command_executer import CommandExecuter\nfrom automation.common import job\nfrom automation.common import job_group\nfrom automation.server.job_manager import IdProducerPolicy\n\n\nclass JobGroupManager(object):\n\n def __init__(self, job_manager):\n self.all_job_groups = []\n\n self.job_manager = job_manager\n self.job_manager.AddListener(self)\n\n self._lock = threading.Lock()\n self._job_group_finished = threading.Condition(self._lock)\n\n self._id_producer = IdProducerPolicy()\n self._id_producer.Initialize(job_group.JobGroup.HOMEDIR_PREFIX,\n 'job-group-(?P<id>\\d+)')\n\n self._logger = logging.getLogger(self.__class__.__name__)\n\n def GetJobGroup(self, group_id):\n with self._lock:\n for group in self.all_job_groups:\n if group.id == group_id:\n return group\n\n return None\n\n def GetAllJobGroups(self):\n with self._lock:\n return copy.deepcopy(self.all_job_groups)\n\n def AddJobGroup(self, group):\n with self._lock:\n group.id = self._id_producer.GetNextId()\n\n self._logger.debug('Creating runtime environment for %r.', group)\n\n CommandExecuter().RunCommand(cmd.Chain(\n cmd.RmTree(group.home_dir), cmd.MakeDir(group.home_dir)))\n\n with self._lock:\n self.all_job_groups.append(group)\n\n for job_ in group.jobs:\n self.job_manager.AddJob(job_)\n\n group.status = job_group.STATUS_EXECUTING\n\n self._logger.info('Added %r to queue.', group)\n\n return group.id\n\n def KillJobGroup(self, group):\n with self._lock:\n self._logger.debug('Killing all jobs that belong to %r.', group)\n\n for job_ in group.jobs:\n self.job_manager.KillJob(job_)\n\n self._logger.debug('Waiting for jobs to quit.')\n\n # Lets block until the group is killed so we know it is completed\n # when we return.\n while group.status not in [job_group.STATUS_SUCCEEDED,\n 
job_group.STATUS_FAILED]:\n self._job_group_finished.wait()\n\n def NotifyJobComplete(self, job_):\n self._logger.debug('Handling %r completion event.', job_)\n\n group = job_.group\n\n with self._lock:\n # We need to perform an action only if the group hasn't already failed.\n if group.status != job_group.STATUS_FAILED:\n if job_.status == job.STATUS_FAILED:\n # We have a failed job, abort the job group\n group.status = job_group.STATUS_FAILED\n if group.cleanup_on_failure:\n for job_ in group.jobs:\n # TODO(bjanakiraman): We should probably only kill dependent jobs\n # instead of the whole job group.\n self.job_manager.KillJob(job_)\n self.job_manager.CleanUpJob(job_)\n else:\n # The job succeeded successfully -- lets check to see if we are done.\n assert job_.status == job.STATUS_SUCCEEDED\n finished = True\n for other_job in group.jobs:\n assert other_job.status != job.STATUS_FAILED\n if other_job.status != job.STATUS_SUCCEEDED:\n finished = False\n break\n\n if finished and group.status != job_group.STATUS_SUCCEEDED:\n # TODO(kbaclawski): Without check performed above following code\n # could be called more than once. This would trigger StateMachine\n # crash, because it cannot transition from STATUS_SUCCEEDED to\n # STATUS_SUCCEEDED. Need to address that bug in near future.\n group.status = job_group.STATUS_SUCCEEDED\n if group.cleanup_on_completion:\n for job_ in group.jobs:\n self.job_manager.CleanUpJob(job_)\n\n self._job_group_finished.notifyAll()\n",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
def make_scatter(df):
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.show()
return
def train_predict_1d(df, test):
regressor = DecisionTreeRegressor(max_depth=2)
regressor.fit(np.array([df['Start station number']]).T, df['Counts'])
xx = np.array([test['Start station number']]).T
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'
)
plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=
'prediction')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.legend()
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))
return
def train_predict_2d(df, test):
regressor = DecisionTreeRegressor()
regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])
nx = 30
ny = 30
x_station = np.linspace(30800, 32300, nx)
y_day = np.linspace(0, 3, ny)
xx, yy = np.meshgrid(x_station, y_day)
z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)
zz = np.reshape(z_counts, (nx, ny))
fig = plt.figure(figsize=(8, 8))
plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)
plt.colorbar(label='bikes predicted')
plt.xlim(np.min(x_station), np.max(x_station))
plt.ylim(np.min(y_day), np.max(y_day))
plt.xlabel('Start station number')
plt.ylabel('Quarter')
print('Mean Absolute Error')
print(mean_absolute_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']])))
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']]))))
return
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
df = pandas.read_csv('2016Q1')
df = df.append(pandas.read_csv('2016Q2'))
df = df.append(pandas.read_csv('2016Q3'))
df = df.append(pandas.read_csv('2016Q4'))
test = pandas.read_csv('2017Q1')
test = test.append(pandas.read_csv('2017Q2'))
test = test.append(pandas.read_csv('2017Q3'))
test = test.append(pandas.read_csv('2017Q4'))
train_predict_1d(df, test)
return
def make_scatter(df):
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.show()
return
def train_predict_1d(df, test):
regressor = DecisionTreeRegressor(max_depth=2)
regressor.fit(np.array([df['Start station number']]).T, df['Counts'])
xx = np.array([test['Start station number']]).T
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'
)
plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=
'prediction')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.legend()
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))
return
def train_predict_2d(df, test):
regressor = DecisionTreeRegressor()
regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])
nx = 30
ny = 30
x_station = np.linspace(30800, 32300, nx)
y_day = np.linspace(0, 3, ny)
xx, yy = np.meshgrid(x_station, y_day)
z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)
zz = np.reshape(z_counts, (nx, ny))
fig = plt.figure(figsize=(8, 8))
plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)
plt.colorbar(label='bikes predicted')
plt.xlim(np.min(x_station), np.max(x_station))
plt.ylim(np.min(y_day), np.max(y_day))
plt.xlabel('Start station number')
plt.ylabel('Quarter')
print('Mean Absolute Error')
print(mean_absolute_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']])))
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']]))))
return
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
df = pandas.read_csv('2016Q1')
df = df.append(pandas.read_csv('2016Q2'))
df = df.append(pandas.read_csv('2016Q3'))
df = df.append(pandas.read_csv('2016Q4'))
test = pandas.read_csv('2017Q1')
test = test.append(pandas.read_csv('2017Q2'))
test = test.append(pandas.read_csv('2017Q3'))
test = test.append(pandas.read_csv('2017Q4'))
train_predict_1d(df, test)
return
def make_scatter(df):
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.show()
return
def train_predict_1d(df, test):
regressor = DecisionTreeRegressor(max_depth=2)
regressor.fit(np.array([df['Start station number']]).T, df['Counts'])
xx = np.array([test['Start station number']]).T
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'
)
plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=
'prediction')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.legend()
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))
return
def train_predict_2d(df, test):
regressor = DecisionTreeRegressor()
regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])
nx = 30
ny = 30
x_station = np.linspace(30800, 32300, nx)
y_day = np.linspace(0, 3, ny)
xx, yy = np.meshgrid(x_station, y_day)
z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)
zz = np.reshape(z_counts, (nx, ny))
fig = plt.figure(figsize=(8, 8))
plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)
plt.colorbar(label='bikes predicted')
plt.xlim(np.min(x_station), np.max(x_station))
plt.ylim(np.min(y_day), np.max(y_day))
plt.xlabel('Start station number')
plt.ylabel('Quarter')
print('Mean Absolute Error')
print(mean_absolute_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']])))
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']]))))
return
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import pandas
from matplotlib import pyplot as plt
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import AdaBoostRegressor
import numpy as np
from sklearn.metrics import mean_absolute_error, mean_squared_error
from math import sqrt
def main():
df = pandas.read_csv('2016Q1')
df = df.append(pandas.read_csv('2016Q2'))
df = df.append(pandas.read_csv('2016Q3'))
df = df.append(pandas.read_csv('2016Q4'))
test = pandas.read_csv('2017Q1')
test = test.append(pandas.read_csv('2017Q2'))
test = test.append(pandas.read_csv('2017Q3'))
test = test.append(pandas.read_csv('2017Q4'))
train_predict_1d(df, test)
return
def make_scatter(df):
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.show()
return
def train_predict_1d(df, test):
regressor = DecisionTreeRegressor(max_depth=2)
regressor.fit(np.array([df['Start station number']]).T, df['Counts'])
xx = np.array([test['Start station number']]).T
plt.figure(figsize=(8, 6))
plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'
)
plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=
'prediction')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.legend()
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))
return
def train_predict_2d(df, test):
regressor = DecisionTreeRegressor()
regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])
nx = 30
ny = 30
x_station = np.linspace(30800, 32300, nx)
y_day = np.linspace(0, 3, ny)
xx, yy = np.meshgrid(x_station, y_day)
z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)
zz = np.reshape(z_counts, (nx, ny))
fig = plt.figure(figsize=(8, 8))
plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)
plt.colorbar(label='bikes predicted')
plt.xlim(np.min(x_station), np.max(x_station))
plt.ylim(np.min(y_day), np.max(y_day))
plt.xlabel('Start station number')
plt.ylabel('Quarter')
print('Mean Absolute Error')
print(mean_absolute_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']])))
print('RMSE')
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[
'Start station number', 'Quarter']]))))
return
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#!/usr/bin/env python3
import pandas
from matplotlib import pyplot as plt
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import AdaBoostRegressor
import numpy as np
from sklearn.metrics import mean_absolute_error, mean_squared_error
from math import sqrt
def main():
df = pandas.read_csv("2016Q1")
df = df.append(pandas.read_csv("2016Q2"))
df = df.append(pandas.read_csv("2016Q3"))
df = df.append(pandas.read_csv("2016Q4"))
test = pandas.read_csv("2017Q1")
test = test.append(pandas.read_csv("2017Q2"))
test = test.append(pandas.read_csv("2017Q3"))
test = test.append(pandas.read_csv("2017Q4"))
#make_scatter(df)
train_predict_1d(df, test)
#train_predict_2d(df, test)
return
def make_scatter(df):
plt.figure(figsize=(8,6))
plt.plot(df['Start station number'], df['Counts'], 'o')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.show()
return
def train_predict_1d(df, test):
regressor = DecisionTreeRegressor(max_depth=2)
regressor.fit(np.array([df['Start station number']]).T, df['Counts'])
xx = np.array([test['Start station number']]).T
plt.figure(figsize=(8,6))
plt.plot(df['Start station number'], df['Counts'], 'o', label='observation')
plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=.7, label='prediction')
plt.xlabel('Station')
plt.ylabel('Counts')
plt.legend()
#plt.show()
print("RMSE")
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))
return
def train_predict_2d(df, test):
#regressor = AdaBoostRegressor(DecisionTreeRegressor(max_depth=10), n_estimators=50, loss="square")
regressor = DecisionTreeRegressor()
regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])
nx = 30
ny = 30
x_station = np.linspace(30800,32300, nx)
y_day = np.linspace(0, 3, ny)
xx, yy = np.meshgrid(x_station, y_day)
z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)
zz = np.reshape(z_counts, (nx, ny))
fig = plt.figure(figsize=(8, 8))
plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)
plt.colorbar(label='bikes predicted')
#plt.scatter(test['Start station number'], test['Counts'], s=test['Counts']/25.0, c='g')
plt.xlim(np.min(x_station), np.max(x_station))
plt.ylim(np.min(y_day), np.max(y_day))
plt.xlabel('Start station number')
plt.ylabel('Quarter')
#plt.show()
#fig.savefig("2d_prediction_quarter")
print("Mean Absolute Error")
print(mean_absolute_error(test['Counts'], regressor.predict(test[['Start station number', 'Quarter']])))
print("RMSE")
print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[['Start station number', 'Quarter']]))))
return
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "e35dbcdef8779ffabc34b5e5c543e35b29523971",
"index": 7989,
"step-1": "<mask token>\n\n\ndef make_scatter(df):\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.show()\n return\n\n\ndef train_predict_1d(df, test):\n regressor = DecisionTreeRegressor(max_depth=2)\n regressor.fit(np.array([df['Start station number']]).T, df['Counts'])\n xx = np.array([test['Start station number']]).T\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'\n )\n plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=\n 'prediction')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.legend()\n print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))\n return\n\n\ndef train_predict_2d(df, test):\n regressor = DecisionTreeRegressor()\n regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])\n nx = 30\n ny = 30\n x_station = np.linspace(30800, 32300, nx)\n y_day = np.linspace(0, 3, ny)\n xx, yy = np.meshgrid(x_station, y_day)\n z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)\n zz = np.reshape(z_counts, (nx, ny))\n fig = plt.figure(figsize=(8, 8))\n plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)\n plt.colorbar(label='bikes predicted')\n plt.xlim(np.min(x_station), np.max(x_station))\n plt.ylim(np.min(y_day), np.max(y_day))\n plt.xlabel('Start station number')\n plt.ylabel('Quarter')\n print('Mean Absolute Error')\n print(mean_absolute_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']])))\n print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']]))))\n return\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n df = pandas.read_csv('2016Q1')\n df = df.append(pandas.read_csv('2016Q2'))\n df = df.append(pandas.read_csv('2016Q3'))\n df = df.append(pandas.read_csv('2016Q4'))\n test = pandas.read_csv('2017Q1')\n test = test.append(pandas.read_csv('2017Q2'))\n test = test.append(pandas.read_csv('2017Q3'))\n test = test.append(pandas.read_csv('2017Q4'))\n train_predict_1d(df, test)\n return\n\n\ndef make_scatter(df):\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.show()\n return\n\n\ndef train_predict_1d(df, test):\n regressor = DecisionTreeRegressor(max_depth=2)\n regressor.fit(np.array([df['Start station number']]).T, df['Counts'])\n xx = np.array([test['Start station number']]).T\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'\n )\n plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=\n 'prediction')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.legend()\n print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))\n return\n\n\ndef train_predict_2d(df, test):\n regressor = DecisionTreeRegressor()\n regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])\n nx = 30\n ny = 30\n x_station = np.linspace(30800, 32300, nx)\n y_day = np.linspace(0, 3, ny)\n xx, yy = np.meshgrid(x_station, y_day)\n z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)\n zz = np.reshape(z_counts, (nx, ny))\n fig = plt.figure(figsize=(8, 8))\n plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)\n plt.colorbar(label='bikes predicted')\n plt.xlim(np.min(x_station), np.max(x_station))\n plt.ylim(np.min(y_day), np.max(y_day))\n plt.xlabel('Start station number')\n plt.ylabel('Quarter')\n print('Mean Absolute Error')\n print(mean_absolute_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']])))\n 
print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']]))))\n return\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n df = pandas.read_csv('2016Q1')\n df = df.append(pandas.read_csv('2016Q2'))\n df = df.append(pandas.read_csv('2016Q3'))\n df = df.append(pandas.read_csv('2016Q4'))\n test = pandas.read_csv('2017Q1')\n test = test.append(pandas.read_csv('2017Q2'))\n test = test.append(pandas.read_csv('2017Q3'))\n test = test.append(pandas.read_csv('2017Q4'))\n train_predict_1d(df, test)\n return\n\n\ndef make_scatter(df):\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.show()\n return\n\n\ndef train_predict_1d(df, test):\n regressor = DecisionTreeRegressor(max_depth=2)\n regressor.fit(np.array([df['Start station number']]).T, df['Counts'])\n xx = np.array([test['Start station number']]).T\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'\n )\n plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=\n 'prediction')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.legend()\n print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))\n return\n\n\ndef train_predict_2d(df, test):\n regressor = DecisionTreeRegressor()\n regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])\n nx = 30\n ny = 30\n x_station = np.linspace(30800, 32300, nx)\n y_day = np.linspace(0, 3, ny)\n xx, yy = np.meshgrid(x_station, y_day)\n z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)\n zz = np.reshape(z_counts, (nx, ny))\n fig = plt.figure(figsize=(8, 8))\n plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)\n plt.colorbar(label='bikes predicted')\n plt.xlim(np.min(x_station), np.max(x_station))\n plt.ylim(np.min(y_day), np.max(y_day))\n plt.xlabel('Start station number')\n plt.ylabel('Quarter')\n print('Mean Absolute Error')\n print(mean_absolute_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']])))\n 
print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']]))))\n return\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import pandas\nfrom matplotlib import pyplot as plt\nfrom sklearn.tree import DecisionTreeRegressor\nfrom sklearn.ensemble import AdaBoostRegressor\nimport numpy as np\nfrom sklearn.metrics import mean_absolute_error, mean_squared_error\nfrom math import sqrt\n\n\ndef main():\n df = pandas.read_csv('2016Q1')\n df = df.append(pandas.read_csv('2016Q2'))\n df = df.append(pandas.read_csv('2016Q3'))\n df = df.append(pandas.read_csv('2016Q4'))\n test = pandas.read_csv('2017Q1')\n test = test.append(pandas.read_csv('2017Q2'))\n test = test.append(pandas.read_csv('2017Q3'))\n test = test.append(pandas.read_csv('2017Q4'))\n train_predict_1d(df, test)\n return\n\n\ndef make_scatter(df):\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.show()\n return\n\n\ndef train_predict_1d(df, test):\n regressor = DecisionTreeRegressor(max_depth=2)\n regressor.fit(np.array([df['Start station number']]).T, df['Counts'])\n xx = np.array([test['Start station number']]).T\n plt.figure(figsize=(8, 6))\n plt.plot(df['Start station number'], df['Counts'], 'o', label='observation'\n )\n plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=0.7, label=\n 'prediction')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.legend()\n print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))\n return\n\n\ndef train_predict_2d(df, test):\n regressor = DecisionTreeRegressor()\n regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])\n nx = 30\n ny = 30\n x_station = np.linspace(30800, 32300, nx)\n y_day = np.linspace(0, 3, ny)\n xx, yy = np.meshgrid(x_station, y_day)\n z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)\n zz = np.reshape(z_counts, (nx, ny))\n fig = plt.figure(figsize=(8, 8))\n plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)\n plt.colorbar(label='bikes predicted')\n plt.xlim(np.min(x_station), np.max(x_station))\n 
plt.ylim(np.min(y_day), np.max(y_day))\n plt.xlabel('Start station number')\n plt.ylabel('Quarter')\n print('Mean Absolute Error')\n print(mean_absolute_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']])))\n print('RMSE')\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[[\n 'Start station number', 'Quarter']]))))\n return\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python3\n\nimport pandas\nfrom matplotlib import pyplot as plt\nfrom sklearn.tree import DecisionTreeRegressor\nfrom sklearn.ensemble import AdaBoostRegressor\nimport numpy as np\nfrom sklearn.metrics import mean_absolute_error, mean_squared_error\nfrom math import sqrt\n\ndef main():\n df = pandas.read_csv(\"2016Q1\")\n df = df.append(pandas.read_csv(\"2016Q2\"))\n df = df.append(pandas.read_csv(\"2016Q3\"))\n df = df.append(pandas.read_csv(\"2016Q4\"))\n\n test = pandas.read_csv(\"2017Q1\")\n test = test.append(pandas.read_csv(\"2017Q2\"))\n test = test.append(pandas.read_csv(\"2017Q3\"))\n test = test.append(pandas.read_csv(\"2017Q4\"))\n #make_scatter(df)\n train_predict_1d(df, test)\n #train_predict_2d(df, test)\n return\n\ndef make_scatter(df):\n plt.figure(figsize=(8,6))\n plt.plot(df['Start station number'], df['Counts'], 'o')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.show()\n return\n\ndef train_predict_1d(df, test):\n regressor = DecisionTreeRegressor(max_depth=2)\n regressor.fit(np.array([df['Start station number']]).T, df['Counts'])\n \n xx = np.array([test['Start station number']]).T\n plt.figure(figsize=(8,6))\n plt.plot(df['Start station number'], df['Counts'], 'o', label='observation')\n plt.plot(xx, regressor.predict(xx), linewidth=4, alpha=.7, label='prediction')\n plt.xlabel('Station')\n plt.ylabel('Counts')\n plt.legend()\n #plt.show()\n\n print(\"RMSE\")\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(xx))))\n return\n\ndef train_predict_2d(df, test):\n #regressor = AdaBoostRegressor(DecisionTreeRegressor(max_depth=10), n_estimators=50, loss=\"square\")\n regressor = DecisionTreeRegressor()\n regressor.fit(df[['Start station number', 'Quarter']], df['Counts'])\n\n nx = 30\n ny = 30\n \n x_station = np.linspace(30800,32300, nx) \n y_day = np.linspace(0, 3, ny)\n xx, yy = np.meshgrid(x_station, y_day)\n\n z_counts = regressor.predict(np.array([xx.flatten(), yy.flatten()]).T)\n zz = 
np.reshape(z_counts, (nx, ny))\n\n fig = plt.figure(figsize=(8, 8))\n plt.pcolormesh(x_station, y_day, zz, cmap=plt.cm.YlOrRd)\n plt.colorbar(label='bikes predicted') \n #plt.scatter(test['Start station number'], test['Counts'], s=test['Counts']/25.0, c='g')\n plt.xlim(np.min(x_station), np.max(x_station))\n plt.ylim(np.min(y_day), np.max(y_day))\n plt.xlabel('Start station number')\n plt.ylabel('Quarter')\n #plt.show()\n #fig.savefig(\"2d_prediction_quarter\")\n\n print(\"Mean Absolute Error\")\n print(mean_absolute_error(test['Counts'], regressor.predict(test[['Start station number', 'Quarter']])))\n print(\"RMSE\")\n print(sqrt(mean_squared_error(test['Counts'], regressor.predict(test[['Start station number', 'Quarter']]))))\n\n return\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class model:
def __init__(self):
self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):
'Brute Force', (5): 'DDoS attacks', (4): 0}
try:
self.model = load('./decision_tree_model.joblib')
self.attack_model = load('./attack_model.joblib')
except:
logging.error("Model can't be found in the main directory")
logging.error('please fix the problem and restart the server')
try:
self.all_features = open('./all_features.txt', 'r').readline(
).split(',')
self.features = open('./features.txt', 'r').read().splitlines()
except:
logging.error("features.txt can't be found in the main directory")
logging.error('please fix the problem and restart the server')
def preprocess(self, data):
data = data[self.features]
data = data.replace([np.inf, -np.inf], np.nan)
data = data.dropna()
data = data.astype('float')
return data.to_numpy()
def load_data_csv(self, path='./data_examples/example.csv'):
self.data = pd.read_csv(path)
if 'Label' in self.data.columns:
self.label = self.data['Label'].to_numpy()
else:
self.label = None
logging.info('This data is labeled')
self.data = self.preprocess(self.data)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class model:
def __init__(self):
self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):
'Brute Force', (5): 'DDoS attacks', (4): 0}
try:
self.model = load('./decision_tree_model.joblib')
self.attack_model = load('./attack_model.joblib')
except:
logging.error("Model can't be found in the main directory")
logging.error('please fix the problem and restart the server')
try:
self.all_features = open('./all_features.txt', 'r').readline(
).split(',')
self.features = open('./features.txt', 'r').read().splitlines()
except:
logging.error("features.txt can't be found in the main directory")
logging.error('please fix the problem and restart the server')
def preprocess(self, data):
data = data[self.features]
data = data.replace([np.inf, -np.inf], np.nan)
data = data.dropna()
data = data.astype('float')
return data.to_numpy()
def load_data_csv(self, path='./data_examples/example.csv'):
self.data = pd.read_csv(path)
if 'Label' in self.data.columns:
self.label = self.data['Label'].to_numpy()
else:
self.label = None
logging.info('This data is labeled')
self.data = self.preprocess(self.data)
<|reserved_special_token_0|>
def predict(self):
results = []
self.prediction = self.model.predict(self.data).astype('int32')
if self.prediction.shape[0] == 1:
if self.prediction.item() == 1:
results.append(self.number_to_label[self.attack_model.
predict(self.data[0, :].reshape(1, -1)).item()])
else:
results.append(0)
else:
for i in range(self.prediction.shape[0]):
if self.prediction[i] == 1:
results.append(self.number_to_label[self.attack_model.
predict(self.data[i, :].reshape(1, -1)).item()])
else:
results.append(0)
return results
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class model:
def __init__(self):
self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):
'Brute Force', (5): 'DDoS attacks', (4): 0}
try:
self.model = load('./decision_tree_model.joblib')
self.attack_model = load('./attack_model.joblib')
except:
logging.error("Model can't be found in the main directory")
logging.error('please fix the problem and restart the server')
try:
self.all_features = open('./all_features.txt', 'r').readline(
).split(',')
self.features = open('./features.txt', 'r').read().splitlines()
except:
logging.error("features.txt can't be found in the main directory")
logging.error('please fix the problem and restart the server')
def preprocess(self, data):
data = data[self.features]
data = data.replace([np.inf, -np.inf], np.nan)
data = data.dropna()
data = data.astype('float')
return data.to_numpy()
def load_data_csv(self, path='./data_examples/example.csv'):
self.data = pd.read_csv(path)
if 'Label' in self.data.columns:
self.label = self.data['Label'].to_numpy()
else:
self.label = None
logging.info('This data is labeled')
self.data = self.preprocess(self.data)
<|reserved_special_token_0|>
def predict(self):
results = []
self.prediction = self.model.predict(self.data).astype('int32')
if self.prediction.shape[0] == 1:
if self.prediction.item() == 1:
results.append(self.number_to_label[self.attack_model.
predict(self.data[0, :].reshape(1, -1)).item()])
else:
results.append(0)
else:
for i in range(self.prediction.shape[0]):
if self.prediction[i] == 1:
results.append(self.number_to_label[self.attack_model.
predict(self.data[i, :].reshape(1, -1)).item()])
else:
results.append(0)
return results
def accuracy(self):
if self.label is None:
logging.error("Score can't be calculated, No label provided")
logging.error("be sure to name your label column with 'Lebel'")
return None
else:
from sklearn.metrics import accuracy_score
accuracy = accuracy_score(self.label, self.prediction)
return accuracy
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if not sys.warnoptions:
warnings.simplefilter('ignore')
class model:
def __init__(self):
self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):
'Brute Force', (5): 'DDoS attacks', (4): 0}
try:
self.model = load('./decision_tree_model.joblib')
self.attack_model = load('./attack_model.joblib')
except:
logging.error("Model can't be found in the main directory")
logging.error('please fix the problem and restart the server')
try:
self.all_features = open('./all_features.txt', 'r').readline(
).split(',')
self.features = open('./features.txt', 'r').read().splitlines()
except:
logging.error("features.txt can't be found in the main directory")
logging.error('please fix the problem and restart the server')
def preprocess(self, data):
data = data[self.features]
data = data.replace([np.inf, -np.inf], np.nan)
data = data.dropna()
data = data.astype('float')
return data.to_numpy()
def load_data_csv(self, path='./data_examples/example.csv'):
self.data = pd.read_csv(path)
if 'Label' in self.data.columns:
self.label = self.data['Label'].to_numpy()
else:
self.label = None
logging.info('This data is labeled')
self.data = self.preprocess(self.data)
def load_data(self, rows):
self.data = pd.DataFrame([x.strip(',').split(',') for x in rows.
strip('bpoint').split('bpoint')], columns=self.all_features)
self.data = self.preprocess(self.data)
def predict(self):
results = []
self.prediction = self.model.predict(self.data).astype('int32')
if self.prediction.shape[0] == 1:
if self.prediction.item() == 1:
results.append(self.number_to_label[self.attack_model.
predict(self.data[0, :].reshape(1, -1)).item()])
else:
results.append(0)
else:
for i in range(self.prediction.shape[0]):
if self.prediction[i] == 1:
results.append(self.number_to_label[self.attack_model.
predict(self.data[i, :].reshape(1, -1)).item()])
else:
results.append(0)
return results
def accuracy(self):
if self.label is None:
logging.error("Score can't be calculated, No label provided")
logging.error("be sure to name your label column with 'Lebel'")
return None
else:
from sklearn.metrics import accuracy_score
accuracy = accuracy_score(self.label, self.prediction)
return accuracy
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import pandas as pd
import numpy as np
import logging
import sklearn
from joblib import load
import sys
import warnings
import os
if not sys.warnoptions:
warnings.simplefilter("ignore")
class model:
def __init__(self):
#from number to labels
self.number_to_label = {1 : "Bot",2 : 'DoS attack',3 : 'Brute Force', 5 : 'DDoS attacks',4 : 0}
# load the pretrained model
try:
self.model = load('./decision_tree_model.joblib')
self.attack_model = load('./attack_model.joblib')
except:
# error if model can't be found in the path
logging.error("Model can\'t be found in the main directory")
logging.error("please fix the problem and restart the server")
# load the features for the preprocessing step
try:
self.all_features = open("./all_features.txt", "r").readline().split(',')
self.features = open("./features.txt", "r").read().splitlines()
except:
# error if features file can't be found in the path
logging.error("features.txt can\'t be found in the main directory")
logging.error("please fix the problem and restart the server")
def preprocess(self,data):
#select only the columns that works best with the pretrained model
data = data[self.features]
#remove infinite and null values
data = data.replace([np.inf, -np.inf], np.nan)
data = data.dropna()
#change the type of the data to float
data = data.astype("float")
#return the data as numpy array
return data.to_numpy()
def load_data_csv(self,path = './data_examples/example.csv'):
#load and preprocess the csv file
self.data = pd.read_csv(path)
#for evaluation tasks, we will save the label
if ('Label' in self.data.columns):
self.label = self.data['Label'].to_numpy()
else:
self.label = None
logging.info('This data is labeled')
self.data = self.preprocess(self.data)
def load_data(self, rows) :
#Load and preprocess strings in csv format
self.data =pd.DataFrame([x.strip(',').split(',') for x in rows.strip('bpoint').split('bpoint')],columns = self.all_features)
self.data = self.preprocess(self.data)
def predict(self):
results = []
#predict the class of the flow
self.prediction = self.model.predict(self.data).astype('int32')
#in case of one row prediction
if (self.prediction.shape[0] == 1 ):
if (self.prediction.item() == 1):
results.append(self.number_to_label[self.attack_model.predict(self.data[0,:].reshape(1, -1)).item()])
else:
results.append(0)
else:
for i in range(self.prediction.shape[0]):
if (self.prediction[i] == 1):
results.append(self.number_to_label[self.attack_model.predict(self.data[i,:].reshape(1, -1)).item()])
else:
results.append(0)
return results
def accuracy(self):
#calculate accuracy in case of label availaiblity
if (self.label is None):
logging.error("Score can't be calculated, No label provided")
logging.error("be sure to name your label column with 'Lebel'")
return None
else:
from sklearn.metrics import accuracy_score
accuracy = accuracy_score(self.label, self.prediction)
return accuracy
"""
m = model()
m.load_data(sys.argv[1])
prediction = m.predict()
"""
|
flexible
|
{
"blob_id": "c0f3a957613a4f4e04aeb3eb2e3fa4053bd0122c",
"index": 8438,
"step-1": "<mask token>\n\n\nclass model:\n\n def __init__(self):\n self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):\n 'Brute Force', (5): 'DDoS attacks', (4): 0}\n try:\n self.model = load('./decision_tree_model.joblib')\n self.attack_model = load('./attack_model.joblib')\n except:\n logging.error(\"Model can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n try:\n self.all_features = open('./all_features.txt', 'r').readline(\n ).split(',')\n self.features = open('./features.txt', 'r').read().splitlines()\n except:\n logging.error(\"features.txt can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n\n def preprocess(self, data):\n data = data[self.features]\n data = data.replace([np.inf, -np.inf], np.nan)\n data = data.dropna()\n data = data.astype('float')\n return data.to_numpy()\n\n def load_data_csv(self, path='./data_examples/example.csv'):\n self.data = pd.read_csv(path)\n if 'Label' in self.data.columns:\n self.label = self.data['Label'].to_numpy()\n else:\n self.label = None\n logging.info('This data is labeled')\n self.data = self.preprocess(self.data)\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass model:\n\n def __init__(self):\n self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):\n 'Brute Force', (5): 'DDoS attacks', (4): 0}\n try:\n self.model = load('./decision_tree_model.joblib')\n self.attack_model = load('./attack_model.joblib')\n except:\n logging.error(\"Model can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n try:\n self.all_features = open('./all_features.txt', 'r').readline(\n ).split(',')\n self.features = open('./features.txt', 'r').read().splitlines()\n except:\n logging.error(\"features.txt can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n\n def preprocess(self, data):\n data = data[self.features]\n data = data.replace([np.inf, -np.inf], np.nan)\n data = data.dropna()\n data = data.astype('float')\n return data.to_numpy()\n\n def load_data_csv(self, path='./data_examples/example.csv'):\n self.data = pd.read_csv(path)\n if 'Label' in self.data.columns:\n self.label = self.data['Label'].to_numpy()\n else:\n self.label = None\n logging.info('This data is labeled')\n self.data = self.preprocess(self.data)\n <mask token>\n\n def predict(self):\n results = []\n self.prediction = self.model.predict(self.data).astype('int32')\n if self.prediction.shape[0] == 1:\n if self.prediction.item() == 1:\n results.append(self.number_to_label[self.attack_model.\n predict(self.data[0, :].reshape(1, -1)).item()])\n else:\n results.append(0)\n else:\n for i in range(self.prediction.shape[0]):\n if self.prediction[i] == 1:\n results.append(self.number_to_label[self.attack_model.\n predict(self.data[i, :].reshape(1, -1)).item()])\n else:\n results.append(0)\n return results\n <mask token>\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass model:\n\n def __init__(self):\n self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):\n 'Brute Force', (5): 'DDoS attacks', (4): 0}\n try:\n self.model = load('./decision_tree_model.joblib')\n self.attack_model = load('./attack_model.joblib')\n except:\n logging.error(\"Model can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n try:\n self.all_features = open('./all_features.txt', 'r').readline(\n ).split(',')\n self.features = open('./features.txt', 'r').read().splitlines()\n except:\n logging.error(\"features.txt can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n\n def preprocess(self, data):\n data = data[self.features]\n data = data.replace([np.inf, -np.inf], np.nan)\n data = data.dropna()\n data = data.astype('float')\n return data.to_numpy()\n\n def load_data_csv(self, path='./data_examples/example.csv'):\n self.data = pd.read_csv(path)\n if 'Label' in self.data.columns:\n self.label = self.data['Label'].to_numpy()\n else:\n self.label = None\n logging.info('This data is labeled')\n self.data = self.preprocess(self.data)\n <mask token>\n\n def predict(self):\n results = []\n self.prediction = self.model.predict(self.data).astype('int32')\n if self.prediction.shape[0] == 1:\n if self.prediction.item() == 1:\n results.append(self.number_to_label[self.attack_model.\n predict(self.data[0, :].reshape(1, -1)).item()])\n else:\n results.append(0)\n else:\n for i in range(self.prediction.shape[0]):\n if self.prediction[i] == 1:\n results.append(self.number_to_label[self.attack_model.\n predict(self.data[i, :].reshape(1, -1)).item()])\n else:\n results.append(0)\n return results\n\n def accuracy(self):\n if self.label is None:\n logging.error(\"Score can't be calculated, No label provided\")\n logging.error(\"be sure to name your label column with 'Lebel'\")\n return None\n else:\n from sklearn.metrics import 
accuracy_score\n accuracy = accuracy_score(self.label, self.prediction)\n return accuracy\n\n\n<mask token>\n",
"step-4": "<mask token>\nif not sys.warnoptions:\n warnings.simplefilter('ignore')\n\n\nclass model:\n\n def __init__(self):\n self.number_to_label = {(1): 'Bot', (2): 'DoS attack', (3):\n 'Brute Force', (5): 'DDoS attacks', (4): 0}\n try:\n self.model = load('./decision_tree_model.joblib')\n self.attack_model = load('./attack_model.joblib')\n except:\n logging.error(\"Model can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n try:\n self.all_features = open('./all_features.txt', 'r').readline(\n ).split(',')\n self.features = open('./features.txt', 'r').read().splitlines()\n except:\n logging.error(\"features.txt can't be found in the main directory\")\n logging.error('please fix the problem and restart the server')\n\n def preprocess(self, data):\n data = data[self.features]\n data = data.replace([np.inf, -np.inf], np.nan)\n data = data.dropna()\n data = data.astype('float')\n return data.to_numpy()\n\n def load_data_csv(self, path='./data_examples/example.csv'):\n self.data = pd.read_csv(path)\n if 'Label' in self.data.columns:\n self.label = self.data['Label'].to_numpy()\n else:\n self.label = None\n logging.info('This data is labeled')\n self.data = self.preprocess(self.data)\n\n def load_data(self, rows):\n self.data = pd.DataFrame([x.strip(',').split(',') for x in rows.\n strip('bpoint').split('bpoint')], columns=self.all_features)\n self.data = self.preprocess(self.data)\n\n def predict(self):\n results = []\n self.prediction = self.model.predict(self.data).astype('int32')\n if self.prediction.shape[0] == 1:\n if self.prediction.item() == 1:\n results.append(self.number_to_label[self.attack_model.\n predict(self.data[0, :].reshape(1, -1)).item()])\n else:\n results.append(0)\n else:\n for i in range(self.prediction.shape[0]):\n if self.prediction[i] == 1:\n results.append(self.number_to_label[self.attack_model.\n predict(self.data[i, :].reshape(1, -1)).item()])\n else:\n results.append(0)\n return 
results\n\n def accuracy(self):\n if self.label is None:\n logging.error(\"Score can't be calculated, No label provided\")\n logging.error(\"be sure to name your label column with 'Lebel'\")\n return None\n else:\n from sklearn.metrics import accuracy_score\n accuracy = accuracy_score(self.label, self.prediction)\n return accuracy\n\n\n<mask token>\n",
"step-5": "import pandas as pd\nimport numpy as np\nimport logging\nimport sklearn\nfrom joblib import load\nimport sys\nimport warnings\nimport os\n\nif not sys.warnoptions:\n warnings.simplefilter(\"ignore\")\n\nclass model:\n def __init__(self):\n #from number to labels\n self.number_to_label = {1 : \"Bot\",2 : 'DoS attack',3 : 'Brute Force', 5 : 'DDoS attacks',4 : 0}\n # load the pretrained model \n try:\n self.model = load('./decision_tree_model.joblib')\n self.attack_model = load('./attack_model.joblib')\n except:\n # error if model can't be found in the path\n logging.error(\"Model can\\'t be found in the main directory\")\n logging.error(\"please fix the problem and restart the server\")\n\n # load the features for the preprocessing step\n try:\n self.all_features = open(\"./all_features.txt\", \"r\").readline().split(',')\n self.features = open(\"./features.txt\", \"r\").read().splitlines()\n except:\n # error if features file can't be found in the path\n logging.error(\"features.txt can\\'t be found in the main directory\")\n logging.error(\"please fix the problem and restart the server\")\n\n def preprocess(self,data):\n #select only the columns that works best with the pretrained model\n data = data[self.features]\n #remove infinite and null values\n data = data.replace([np.inf, -np.inf], np.nan)\n data = data.dropna()\n #change the type of the data to float\n data = data.astype(\"float\")\n #return the data as numpy array\n return data.to_numpy()\n\n def load_data_csv(self,path = './data_examples/example.csv'):\n #load and preprocess the csv file\n self.data = pd.read_csv(path)\n #for evaluation tasks, we will save the label\n if ('Label' in self.data.columns):\n self.label = self.data['Label'].to_numpy()\n else:\n self.label = None\n logging.info('This data is labeled')\n\n self.data = self.preprocess(self.data)\n\n def load_data(self, rows) :\n #Load and preprocess strings in csv format \n self.data =pd.DataFrame([x.strip(',').split(',') for x in 
rows.strip('bpoint').split('bpoint')],columns = self.all_features)\n self.data = self.preprocess(self.data)\n\n def predict(self):\n results = []\n #predict the class of the flow\n self.prediction = self.model.predict(self.data).astype('int32')\n #in case of one row prediction\n if (self.prediction.shape[0] == 1 ):\n if (self.prediction.item() == 1):\n results.append(self.number_to_label[self.attack_model.predict(self.data[0,:].reshape(1, -1)).item()])\n else:\n results.append(0)\n \n else:\n for i in range(self.prediction.shape[0]):\n if (self.prediction[i] == 1):\n results.append(self.number_to_label[self.attack_model.predict(self.data[i,:].reshape(1, -1)).item()])\n else:\n results.append(0)\n return results\n \n def accuracy(self):\n #calculate accuracy in case of label availaiblity\n if (self.label is None):\n logging.error(\"Score can't be calculated, No label provided\")\n logging.error(\"be sure to name your label column with 'Lebel'\")\n return None\n else:\n from sklearn.metrics import accuracy_score\n accuracy = accuracy_score(self.label, self.prediction)\n return accuracy\n\"\"\"\nm = model()\nm.load_data(sys.argv[1])\nprediction = m.predict()\n\"\"\"\n\n\n",
"step-ids": [
4,
5,
6,
8,
10
]
}
|
[
4,
5,
6,
8,
10
] |
<|reserved_special_token_0|>
class Solution:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def minimumOperations(self, nums: List[int], start: int, goal: int) ->int:
que = deque([(start, 0)])
visited = set()
while que:
x, steps = que.popleft()
for i in nums:
for t in [x + i, x - i, x ^ i]:
if t == goal:
return steps + 1
if 0 <= t <= 1000 and t not in visited:
visited.add(t)
que.append((t, steps + 1))
return -1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Solution:
def minimumOperations(self, nums: List[int], start: int, goal: int) ->int:
que = deque([(start, 0)])
visited = set()
while que:
x, steps = que.popleft()
for i in nums:
for t in [x + i, x - i, x ^ i]:
if t == goal:
return steps + 1
if 0 <= t <= 1000 and t not in visited:
visited.add(t)
que.append((t, steps + 1))
return -1
if __name__ == '__main__':
print(Solution().minimumOperations(nums=[2, 4, 12], start=2, goal=12))
print(Solution().minimumOperations(nums=[3, 5, 7], start=0, goal=-4))
print(Solution().minimumOperations(nums=[2, 8, 16], start=0, goal=1))
nums = [-574083075, -393928592, -508025046, 942818778, 355796909,
515245901, 40297943, 106087952, 112856312, -516143616, 363801856,
431681353, 726373078, 947630603, 357311001, 594181298, -797268217,
-741740009, 310972287, 588107527, -535699426, 56324906, -77958073,
739798122, -839472160, 439902753, -599749231, -378067373, -
466272504, -668036170, 404827976, 805486978, -762507067, 726001618,
-761047930, 574054980, 365793614, 112020312, 612806855, -256862366,
174046424, 646109365, 263765015, 952305939, 864217737, -236873371,
-991807014, 365730786, -908194963, -778205177, -949314048, -
636570500, -883257881, 316313456, -846577965, 132287864, -143230736,
425542510, -99852882, -845180792, -329895545, 402782707, -52191127,
-470380017, -788836785, -655887976, -899430590, 481923982, 45348738,
-595401481, -470990760, -417390352, -570278840, -873871723, -
905595403, 276201114, -733014032, 126018863, 452235438, -512574658,
-172220362, 845468743, -743189114, 597319839, -584451932, 410604481,
-508885990, -670396751, -765996786, 345814977, -920014372, -
826696704, 640912714, 119494504, 745808962, -503060001, -677959595,
-831428592, 282855843, 150678167, -467803553, -503929808, 636431692,
-235369757, -964826080, 93942566, -65314422, -385277528, -379647659,
601981747, -724269861, -516713072, -487487495, 655771565, 406499531,
-943540581, -290169291, 438686645, -227355533, -822612523,
218329747, -800810927, -944724740, -978181517, 274815523, 296317841,
56043572, -712672386, -374759873, 86973233, -246165119, 73819230, -
801140338, 414767806, 883318746, -822063159, -705772942, -674915800,
710520717, -97115365, 599549847, 115344568, 53002314, 242487774, -
665998906, -986068895, -844909606, -515222297, -500827406,
317865850, -50395059, 522417393, 51184184, 241544846, -996297136, -
227251827, 924359619, 822815774, 149467545, 523511343, 252991991,
450254984, -393459583, 617410075, 197030479, -234418418, -256650708,
872334551, 779068346, 216294504, -708680875, -171498970, -970211466,
-176493993, 729939373, -658054782, -342680218, 75508900, -377139149,
392008859, 121412250, -163586626, -468148273, 624248706, 50004864,
-862378428, -849927586, 33598413, -157654824, -229712613, 149116317,
183820138, 378717707, -995563605, 777654910, 511275580, -157964872,
-718605034, -764316227, -225837302, -166208500, -587688677,
78982205, -488693575, 667205793, 419165994, 731543316, 97551954, -
387317666, -580873271, 533504431, -31624036, -356035140, -849089082,
-767376392, -625237600, 940717947, -337709497, 915255567, 727274007,
-879463448, -363148174, -854892492, 110472344, -466194659, -
146843198, -454944217, -365338018, -349424052, 994474446, -
554968068, -883734951, -697723265, 583756420, -5696410, -413731452,
-278706136, -399245668, 83345207, -227231270, 618384545, 846514423,
-556667092, 590460194, -686116067, -509669269, -510065093, 77094171,
270317951, 166095128, -918526061, -766370855, -20861321, 478791777,
663673443, -152055285, 224745414, 123998803, 66824877, -85117337,
212126175, -718523523, 615359230, -212148589, 620733736, -81197397,
51814471, 709312024, 562145805, -770811828, 321230393, -611636320,
-421337549, -804527290, -416739656, -886764000, 170695026,
414273830, -449987380, -56782953, 772039002, -961265403, -896009751,
-524231358, 497253209, -507048459, -308522246, -508249054, -
53240581, -241704483, -974133571, 232897679, -152365934, -861310248,
-305766289, 340680726, 844612779, -180227470, 40798478, 729446447,
395975250, -142447074, -606021375, 47555730, 294446347, 452346091,
-409427076, -845574381, -838995437, 45787728, 714700474, -315824001,
694717388, 502723269, 119244099, -538412679, -207297135, -189078560,
-812610469, -350061253, -73975237, -119323509, 791863263, 741180208,
740488891, -475394166, -191585617, -441527154, 767292531, 201222965,
-150196525, 588513813, 245328283, 396662663, 100705864, 126789247,
487161165, -460512081, -469521559, -998848254, -917609155,
314537168, 418002454, -926920818, -628671538, 179971032, -105401559,
449618919, 823404672, 178494651, -773108884, 10686795, -506642993,
-60172121, -510142552, 651623281, -163851428, 158562600, -782456228,
-336697076, -571952851, 849878818, -456510759, -65997243, -
506043404, -558981572, 186946604, 124948039, 954065944, 707437320,
-224056616, -319237038, 512138196, 742466011, -49725596, -784781640,
-753413026, -331602365, -246166733, -658650959, -4888181, -
547553549, 786689548, -866846384, -212028209, -98029403, -325422497,
-409855095, 320083382, -491251215, -471713326, 890922019, -
766590943, -481641953, -227197451, -709166930, -965945544,
407688175, -78385698, -372800469, 389036825, 79885300, -858488452,
-390177477, 233839191, -518116358, 420408256, 872470025, 241770824,
-106901417, -328631191, 548580365, -88408815, -647601013, 658880218,
-870455388, 277154380, 370022702, -381519264, -800726224, 183685380,
208169777, 925905330, 732494840, 251754641, -681988029, 593628349,
153852085, 353590607, 242118102, -788094641, -242801844, 474214244,
579450364, 580046580, -269927114, 249739292, 295331955, -544556236,
-814569172, 808895922, 707421114, 305101587, 621173158, -248896453,
988552702, -375313331, -87289858, -796466539, -529411285, -
197315984, 33984203, -122839651, -90735568, 277265491, 762059774, -
628018119, -406508643, -856856769, 364613737, 59319066, 614382155,
-614620718, -133957131, -394985422, -29943491, 154443077, -72727846,
392096990, 562681453, 364248049, -156700958, 717335155, -343408748,
77301840, -155372684, -432114609, 414752267, -485732822, 876096548,
842614035, -614245110, -872219121, 291509502, 334817026, 214330487,
405297459, -449582485, 789314834, 936409758, 452350380, -146649749,
898255045, 116506422, 671728835, 280507922, -189039799, -565803074,
-439924663, -14345985, -98428526, 57303809, 424685389, -84977856, -
9251973, 998935249, 229402894, -405424548, 448394272, 182149207, -
728030940, 347577568, 567511928, -27655302, 400866779, -509269521,
-580602375, 405956020, -855173313, 258091129, 909162200, -315251598,
-236890006, -531780379, 342955474, -65890269, -111521851, -
139906773, 34939329, 927781348, 300458386, -603518159, 341287362, -
234266006, 634183737, 454833275, 79631354, -954691672, 102295826,
688738167, -958428411, -293858940, 480440548, 590037773, -365477625,
-425165732, 170388756, 164258145, -507355122, 44132561, 982798160,
-101120201, -920959602, -239250887, 534862084, -834736952, -
123162323, 389682556, 656996523, 864481760, 381156936, 129520066, -
995551618, 106129054, -471580461, 856850511, 653020333, 531769579,
-190375506, -992983956, 73867968, -931909584, 403329114, -945055546,
627782991, -666011011, 214665550, 505169020, 210703185, -591690068,
11218620, 790987020, 561646751, -33552011, -407054835, -850936697,
-838201457, -878394038, -759131062, -857347819, 531582062,
941614352, -743754869, 650338718, 178603580, -834368178, -976933957,
138667533, 746471721, 551579035, -173400777, -1191455, 320121832, -
756997945, 402594806, 934711944, 970489131, -193223639, 276816990,
842959026, -799673669, -367385466, 681433973, 468892554, -455199860,
393993101, 905435993, 218314965, 284795080, 913357885, -652530417,
743455659, 869345718, 808902357, 829820413, 7206928, 544900359,
225903242, -507688526, 750219353, -663810717, -643969173, -
269151675, 348252329, -144351998, 693995296, -692546103, 869432378,
650161259, 568234384, 710782517, 179157604, -446849233, -922615096,
-61183498, 30945194, 819052356, 467911324, 119876349, 46908453, -
420671619, 344944591, 889080726, -619477633, 174882730, 553799129,
-941691933, 146036558, -116064711, 222282163, -272996845, -
147041859, -381977096, -786757040, 229096334, 712541239, 326039628,
-952490563, -362214129, -680530864, 421358212, -472290821, -
331398150, -42297937, -393141325, -467541333, 655524006, 452908624,
-626562356, -758303565, 338224482, 312047704, 599445442, -328430584,
259549134, 838272865, -755896597, -151000710, 607787908, 11870257,
-680877184, 528161590, 769242561, -447486537, -127579653, 135915595,
-271181270, 12536315, 693445551, 900639800, -692327759, -671179999,
977783490, 935798407, 659688020, -478438023, -852131846, -900332354,
-71029072, 888095095, 924175448, 430392829, 391195112, 399460998, -
173259008, -168543477, -495967896, -697314804, 591126097, 301126906,
946273416, -772817341, -996445410, 466876435, -92937212, -226599286,
43831927, -588596503, -55759661, 212885530, -805455693, 572269060,
415773175, -320900489, -651775079, 5276363, 91615150, -882588415,
502210147, -401039810, 26713405, -723806893, 125439289, 472777644,
869504248, 967552969, -268043646, -146710780, -511973692, -
803204681, -146827180, -453201623, -878534466, 631307563, 507752930,
-63646026, -348120807, 222898965, -410732708, 617953050, -478244422,
877782569, -507956686, -196516478, -477074335, 329039585, -
480651334, -890030740, 461391919, -977815738, -943937849, 321402466,
-588396975, -945139052, 871313567, -484830305, 365305963, 891985414,
466048577, 880607400, -245705654, 359506342, -612177301, 840415132,
693541406, 707348310, 971762025, -871678269, 897143169, 625100531,
743908163, -315815019, -63211252, -962051459, 510469141, 566817231,
-186207711, 309838979, 101194721, -127111899, -109107404, -
702499174, 918781433, 34041307, 927374088, -67369303, -680339659,
202481166, -218771120, 329951816, -280782626, -423403505, 619779171,
-567310903, -660420942, 756801677, 996208091, 822990010, 940351540,
1331227, 382201579, 891956260, -894584436, 346600029, 805733487, -
691767750, 859030444, 1]
print(Solution().minimumOperations(nums, 938, 80))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from typing import List
from collections import deque
class Solution:
def minimumOperations(self, nums: List[int], start: int, goal: int) ->int:
que = deque([(start, 0)])
visited = set()
while que:
x, steps = que.popleft()
for i in nums:
for t in [x + i, x - i, x ^ i]:
if t == goal:
return steps + 1
if 0 <= t <= 1000 and t not in visited:
visited.add(t)
que.append((t, steps + 1))
return -1
if __name__ == '__main__':
print(Solution().minimumOperations(nums=[2, 4, 12], start=2, goal=12))
print(Solution().minimumOperations(nums=[3, 5, 7], start=0, goal=-4))
print(Solution().minimumOperations(nums=[2, 8, 16], start=0, goal=1))
nums = [-574083075, -393928592, -508025046, 942818778, 355796909,
515245901, 40297943, 106087952, 112856312, -516143616, 363801856,
431681353, 726373078, 947630603, 357311001, 594181298, -797268217,
-741740009, 310972287, 588107527, -535699426, 56324906, -77958073,
739798122, -839472160, 439902753, -599749231, -378067373, -
466272504, -668036170, 404827976, 805486978, -762507067, 726001618,
-761047930, 574054980, 365793614, 112020312, 612806855, -256862366,
174046424, 646109365, 263765015, 952305939, 864217737, -236873371,
-991807014, 365730786, -908194963, -778205177, -949314048, -
636570500, -883257881, 316313456, -846577965, 132287864, -143230736,
425542510, -99852882, -845180792, -329895545, 402782707, -52191127,
-470380017, -788836785, -655887976, -899430590, 481923982, 45348738,
-595401481, -470990760, -417390352, -570278840, -873871723, -
905595403, 276201114, -733014032, 126018863, 452235438, -512574658,
-172220362, 845468743, -743189114, 597319839, -584451932, 410604481,
-508885990, -670396751, -765996786, 345814977, -920014372, -
826696704, 640912714, 119494504, 745808962, -503060001, -677959595,
-831428592, 282855843, 150678167, -467803553, -503929808, 636431692,
-235369757, -964826080, 93942566, -65314422, -385277528, -379647659,
601981747, -724269861, -516713072, -487487495, 655771565, 406499531,
-943540581, -290169291, 438686645, -227355533, -822612523,
218329747, -800810927, -944724740, -978181517, 274815523, 296317841,
56043572, -712672386, -374759873, 86973233, -246165119, 73819230, -
801140338, 414767806, 883318746, -822063159, -705772942, -674915800,
710520717, -97115365, 599549847, 115344568, 53002314, 242487774, -
665998906, -986068895, -844909606, -515222297, -500827406,
317865850, -50395059, 522417393, 51184184, 241544846, -996297136, -
227251827, 924359619, 822815774, 149467545, 523511343, 252991991,
450254984, -393459583, 617410075, 197030479, -234418418, -256650708,
872334551, 779068346, 216294504, -708680875, -171498970, -970211466,
-176493993, 729939373, -658054782, -342680218, 75508900, -377139149,
392008859, 121412250, -163586626, -468148273, 624248706, 50004864,
-862378428, -849927586, 33598413, -157654824, -229712613, 149116317,
183820138, 378717707, -995563605, 777654910, 511275580, -157964872,
-718605034, -764316227, -225837302, -166208500, -587688677,
78982205, -488693575, 667205793, 419165994, 731543316, 97551954, -
387317666, -580873271, 533504431, -31624036, -356035140, -849089082,
-767376392, -625237600, 940717947, -337709497, 915255567, 727274007,
-879463448, -363148174, -854892492, 110472344, -466194659, -
146843198, -454944217, -365338018, -349424052, 994474446, -
554968068, -883734951, -697723265, 583756420, -5696410, -413731452,
-278706136, -399245668, 83345207, -227231270, 618384545, 846514423,
-556667092, 590460194, -686116067, -509669269, -510065093, 77094171,
270317951, 166095128, -918526061, -766370855, -20861321, 478791777,
663673443, -152055285, 224745414, 123998803, 66824877, -85117337,
212126175, -718523523, 615359230, -212148589, 620733736, -81197397,
51814471, 709312024, 562145805, -770811828, 321230393, -611636320,
-421337549, -804527290, -416739656, -886764000, 170695026,
414273830, -449987380, -56782953, 772039002, -961265403, -896009751,
-524231358, 497253209, -507048459, -308522246, -508249054, -
53240581, -241704483, -974133571, 232897679, -152365934, -861310248,
-305766289, 340680726, 844612779, -180227470, 40798478, 729446447,
395975250, -142447074, -606021375, 47555730, 294446347, 452346091,
-409427076, -845574381, -838995437, 45787728, 714700474, -315824001,
694717388, 502723269, 119244099, -538412679, -207297135, -189078560,
-812610469, -350061253, -73975237, -119323509, 791863263, 741180208,
740488891, -475394166, -191585617, -441527154, 767292531, 201222965,
-150196525, 588513813, 245328283, 396662663, 100705864, 126789247,
487161165, -460512081, -469521559, -998848254, -917609155,
314537168, 418002454, -926920818, -628671538, 179971032, -105401559,
449618919, 823404672, 178494651, -773108884, 10686795, -506642993,
-60172121, -510142552, 651623281, -163851428, 158562600, -782456228,
-336697076, -571952851, 849878818, -456510759, -65997243, -
506043404, -558981572, 186946604, 124948039, 954065944, 707437320,
-224056616, -319237038, 512138196, 742466011, -49725596, -784781640,
-753413026, -331602365, -246166733, -658650959, -4888181, -
547553549, 786689548, -866846384, -212028209, -98029403, -325422497,
-409855095, 320083382, -491251215, -471713326, 890922019, -
766590943, -481641953, -227197451, -709166930, -965945544,
407688175, -78385698, -372800469, 389036825, 79885300, -858488452,
-390177477, 233839191, -518116358, 420408256, 872470025, 241770824,
-106901417, -328631191, 548580365, -88408815, -647601013, 658880218,
-870455388, 277154380, 370022702, -381519264, -800726224, 183685380,
208169777, 925905330, 732494840, 251754641, -681988029, 593628349,
153852085, 353590607, 242118102, -788094641, -242801844, 474214244,
579450364, 580046580, -269927114, 249739292, 295331955, -544556236,
-814569172, 808895922, 707421114, 305101587, 621173158, -248896453,
988552702, -375313331, -87289858, -796466539, -529411285, -
197315984, 33984203, -122839651, -90735568, 277265491, 762059774, -
628018119, -406508643, -856856769, 364613737, 59319066, 614382155,
-614620718, -133957131, -394985422, -29943491, 154443077, -72727846,
392096990, 562681453, 364248049, -156700958, 717335155, -343408748,
77301840, -155372684, -432114609, 414752267, -485732822, 876096548,
842614035, -614245110, -872219121, 291509502, 334817026, 214330487,
405297459, -449582485, 789314834, 936409758, 452350380, -146649749,
898255045, 116506422, 671728835, 280507922, -189039799, -565803074,
-439924663, -14345985, -98428526, 57303809, 424685389, -84977856, -
9251973, 998935249, 229402894, -405424548, 448394272, 182149207, -
728030940, 347577568, 567511928, -27655302, 400866779, -509269521,
-580602375, 405956020, -855173313, 258091129, 909162200, -315251598,
-236890006, -531780379, 342955474, -65890269, -111521851, -
139906773, 34939329, 927781348, 300458386, -603518159, 341287362, -
234266006, 634183737, 454833275, 79631354, -954691672, 102295826,
688738167, -958428411, -293858940, 480440548, 590037773, -365477625,
-425165732, 170388756, 164258145, -507355122, 44132561, 982798160,
-101120201, -920959602, -239250887, 534862084, -834736952, -
123162323, 389682556, 656996523, 864481760, 381156936, 129520066, -
995551618, 106129054, -471580461, 856850511, 653020333, 531769579,
-190375506, -992983956, 73867968, -931909584, 403329114, -945055546,
627782991, -666011011, 214665550, 505169020, 210703185, -591690068,
11218620, 790987020, 561646751, -33552011, -407054835, -850936697,
-838201457, -878394038, -759131062, -857347819, 531582062,
941614352, -743754869, 650338718, 178603580, -834368178, -976933957,
138667533, 746471721, 551579035, -173400777, -1191455, 320121832, -
756997945, 402594806, 934711944, 970489131, -193223639, 276816990,
842959026, -799673669, -367385466, 681433973, 468892554, -455199860,
393993101, 905435993, 218314965, 284795080, 913357885, -652530417,
743455659, 869345718, 808902357, 829820413, 7206928, 544900359,
225903242, -507688526, 750219353, -663810717, -643969173, -
269151675, 348252329, -144351998, 693995296, -692546103, 869432378,
650161259, 568234384, 710782517, 179157604, -446849233, -922615096,
-61183498, 30945194, 819052356, 467911324, 119876349, 46908453, -
420671619, 344944591, 889080726, -619477633, 174882730, 553799129,
-941691933, 146036558, -116064711, 222282163, -272996845, -
147041859, -381977096, -786757040, 229096334, 712541239, 326039628,
-952490563, -362214129, -680530864, 421358212, -472290821, -
331398150, -42297937, -393141325, -467541333, 655524006, 452908624,
-626562356, -758303565, 338224482, 312047704, 599445442, -328430584,
259549134, 838272865, -755896597, -151000710, 607787908, 11870257,
-680877184, 528161590, 769242561, -447486537, -127579653, 135915595,
-271181270, 12536315, 693445551, 900639800, -692327759, -671179999,
977783490, 935798407, 659688020, -478438023, -852131846, -900332354,
-71029072, 888095095, 924175448, 430392829, 391195112, 399460998, -
173259008, -168543477, -495967896, -697314804, 591126097, 301126906,
946273416, -772817341, -996445410, 466876435, -92937212, -226599286,
43831927, -588596503, -55759661, 212885530, -805455693, 572269060,
415773175, -320900489, -651775079, 5276363, 91615150, -882588415,
502210147, -401039810, 26713405, -723806893, 125439289, 472777644,
869504248, 967552969, -268043646, -146710780, -511973692, -
803204681, -146827180, -453201623, -878534466, 631307563, 507752930,
-63646026, -348120807, 222898965, -410732708, 617953050, -478244422,
877782569, -507956686, -196516478, -477074335, 329039585, -
480651334, -890030740, 461391919, -977815738, -943937849, 321402466,
-588396975, -945139052, 871313567, -484830305, 365305963, 891985414,
466048577, 880607400, -245705654, 359506342, -612177301, 840415132,
693541406, 707348310, 971762025, -871678269, 897143169, 625100531,
743908163, -315815019, -63211252, -962051459, 510469141, 566817231,
-186207711, 309838979, 101194721, -127111899, -109107404, -
702499174, 918781433, 34041307, 927374088, -67369303, -680339659,
202481166, -218771120, 329951816, -280782626, -423403505, 619779171,
-567310903, -660420942, 756801677, 996208091, 822990010, 940351540,
1331227, 382201579, 891956260, -894584436, 346600029, 805733487, -
691767750, 859030444, 1]
print(Solution().minimumOperations(nums, 938, 80))
<|reserved_special_token_1|>
'''
-Medium-
*BFS*
You are given a 0-indexed integer array nums containing distinct numbers, an integer start, and an integer goal. There is an integer x that is initially set to start, and you want to perform operations on x such that it is converted to goal. You can perform the following operation repeatedly on the number x:
If 0 <= x <= 1000, then for any index i in the array (0 <= i < nums.length), you can set x to any of the following:
x + nums[i]
x - nums[i]
x ^ nums[i] (bitwise-XOR)
Note that you can use each nums[i] any number of times in any order. Operations that set x to be out of the range 0 <= x <= 1000 are valid, but no more operations can be done afterward.
Return the minimum number of operations needed to convert x = start into goal, and -1 if it is not possible.
Example 1:
Input: nums = [2,4,12], start = 2, goal = 12
Output: 2
Explanation: We can go from 2 → 14 → 12 with the following 2 operations.
- 2 + 12 = 14
- 14 - 2 = 12
Example 2:
Input: nums = [3,5,7], start = 0, goal = -4
Output: 2
Explanation: We can go from 0 → 3 → -4 with the following 2 operations.
- 0 + 3 = 3
- 3 - 7 = -4
Note that the last operation sets x out of the range 0 <= x <= 1000, which is valid.
Example 3:
Input: nums = [2,8,16], start = 0, goal = 1
Output: -1
Explanation: There is no way to convert 0 into 1.
Constraints:
1 <= nums.length <= 1000
-10^9 <= nums[i], goal <= 10^9
0 <= start <= 1000
start != goal
All the integers in nums are distinct.
'''
from typing import List
from collections import deque
class Solution:
    def minimumOperations(self, nums: List[int], start: int, goal: int) -> int:
        """Return the minimum number of operations converting start to goal, or -1.

        Breadth-first search over the reachable values: from a value x in
        [0, 1000] the moves are x + i, x - i, and x ^ i for each i in nums.
        Values outside [0, 1000] are terminal, so they are only compared
        against goal and never enqueued.

        Args:
            nums: distinct integers usable any number of times.
            start: initial value, 0 <= start <= 1000.
            goal: target value (may lie outside [0, 1000]).

        Returns:
            Minimum operation count, or -1 if goal is unreachable.
        """
        que = deque([(start, 0)])
        # Seed visited with start so operations that cycle back to it
        # (e.g. x + i followed by x - i) are not re-enqueued.
        visited = {start}
        while que:
            x, steps = que.popleft()
            for i in nums:
                for t in (x + i, x - i, x ^ i):
                    if t == goal:
                        return steps + 1
                    if 0 <= t <= 1000 and t not in visited:
                        visited.add(t)
                        que.append((t, steps + 1))
        # Queue exhausted without reaching goal: unreachable.
        return -1
# Smoke tests: the three examples from the problem statement.
# Expected output: 2, 2, -1 (the large stress case below follows).
if __name__ == "__main__":
    print(Solution().minimumOperations(nums = [2,4,12], start = 2, goal = 12))  # expected 2
    print(Solution().minimumOperations(nums = [3,5,7], start = 0, goal = -4))  # expected 2
    print(Solution().minimumOperations(nums = [2,8,16], start = 0, goal = 1))  # expected -1
nums = [-574083075,-393928592,-508025046,942818778,355796909,515245901,40297943,106087952,112856312,-516143616,363801856,431681353,726373078,947630603,357311001,594181298,-797268217,-741740009,310972287,588107527,-535699426,56324906,-77958073,739798122,-839472160,439902753,-599749231,-378067373,-466272504,-668036170,404827976,805486978,-762507067,726001618,-761047930,574054980,365793614,112020312,612806855,-256862366,174046424,646109365,263765015,952305939,864217737,-236873371,-991807014,365730786,-908194963,-778205177,-949314048,-636570500,-883257881,316313456,-846577965,132287864,-143230736,425542510,-99852882,-845180792,-329895545,402782707,-52191127,-470380017,-788836785,-655887976,-899430590,481923982,45348738,-595401481,-470990760,-417390352,-570278840,-873871723,-905595403,276201114,-733014032,126018863,452235438,-512574658,-172220362,845468743,-743189114,597319839,-584451932,410604481,-508885990,-670396751,-765996786,345814977,-920014372,-826696704,640912714,119494504,745808962,-503060001,-677959595,-831428592,282855843,150678167,-467803553,-503929808,636431692,-235369757,-964826080,93942566,-65314422,-385277528,-379647659,601981747,-724269861,-516713072,-487487495,655771565,406499531,-943540581,-290169291,438686645,-227355533,-822612523,218329747,-800810927,-944724740,-978181517,274815523,296317841,56043572,-712672386,-374759873,86973233,-246165119,73819230,-801140338,414767806,883318746,-822063159,-705772942,-674915800,710520717,-97115365,599549847,115344568,53002314,242487774,-665998906,-986068895,-844909606,-515222297,-500827406,317865850,-50395059,522417393,51184184,241544846,-996297136,-227251827,924359619,822815774,149467545,523511343,252991991,450254984,-393459583,617410075,197030479,-234418418,-256650708,872334551,779068346,216294504,-708680875,-171498970,-970211466,-176493993,729939373,-658054782,-342680218,75508900,-377139149,392008859,121412250,-163586626,-468148273,624248706,50004864,-862378428,-849927586,33598413,-157654824,-229712613,149116317
,183820138,378717707,-995563605,777654910,511275580,-157964872,-718605034,-764316227,-225837302,-166208500,-587688677,78982205,-488693575,667205793,419165994,731543316,97551954,-387317666,-580873271,533504431,-31624036,-356035140,-849089082,-767376392,-625237600,940717947,-337709497,915255567,727274007,-879463448,-363148174,-854892492,110472344,-466194659,-146843198,-454944217,-365338018,-349424052,994474446,-554968068,-883734951,-697723265,583756420,-5696410,-413731452,-278706136,-399245668,83345207,-227231270,618384545,846514423,-556667092,590460194,-686116067,-509669269,-510065093,77094171,270317951,166095128,-918526061,-766370855,-20861321,478791777,663673443,-152055285,224745414,123998803,66824877,-85117337,212126175,-718523523,615359230,-212148589,620733736,-81197397,51814471,709312024,562145805,-770811828,321230393,-611636320,-421337549,-804527290,-416739656,-886764000,170695026,414273830,-449987380,-56782953,772039002,-961265403,-896009751,-524231358,497253209,-507048459,-308522246,-508249054,-53240581,-241704483,-974133571,232897679,-152365934,-861310248,-305766289,340680726,844612779,-180227470,40798478,729446447,395975250,-142447074,-606021375,47555730,294446347,452346091,-409427076,-845574381,-838995437,45787728,714700474,-315824001,694717388,502723269,119244099,-538412679,-207297135,-189078560,-812610469,-350061253,-73975237,-119323509,791863263,741180208,740488891,-475394166,-191585617,-441527154,767292531,201222965,-150196525,588513813,245328283,396662663,100705864,126789247,487161165,-460512081,-469521559,-998848254,-917609155,314537168,418002454,-926920818,-628671538,179971032,-105401559,449618919,823404672,178494651,-773108884,10686795,-506642993,-60172121,-510142552,651623281,-163851428,158562600,-782456228,-336697076,-571952851,849878818,-456510759,-65997243,-506043404,-558981572,186946604,124948039,954065944,707437320,-224056616,-319237038,512138196,742466011,-49725596,-784781640,-753413026,-331602365,-246166733,-658650959,-4888181,-547553549,78
6689548,-866846384,-212028209,-98029403,-325422497,-409855095,320083382,-491251215,-471713326,890922019,-766590943,-481641953,-227197451,-709166930,-965945544,407688175,-78385698,-372800469,389036825,79885300,-858488452,-390177477,233839191,-518116358,420408256,872470025,241770824,-106901417,-328631191,548580365,-88408815,-647601013,658880218,-870455388,277154380,370022702,-381519264,-800726224,183685380,208169777,925905330,732494840,251754641,-681988029,593628349,153852085,353590607,242118102,-788094641,-242801844,474214244,579450364,580046580,-269927114,249739292,295331955,-544556236,-814569172,808895922,707421114,305101587,621173158,-248896453,988552702,-375313331,-87289858,-796466539,-529411285,-197315984,33984203,-122839651,-90735568,277265491,762059774,-628018119,-406508643,-856856769,364613737,59319066,614382155,-614620718,-133957131,-394985422,-29943491,154443077,-72727846,392096990,562681453,364248049,-156700958,717335155,-343408748,77301840,-155372684,-432114609,414752267,-485732822,876096548,842614035,-614245110,-872219121,291509502,334817026,214330487,405297459,-449582485,789314834,936409758,452350380,-146649749,898255045,116506422,671728835,280507922,-189039799,-565803074,-439924663,-14345985,-98428526,57303809,424685389,-84977856,-9251973,998935249,229402894,-405424548,448394272,182149207,-728030940,347577568,567511928,-27655302,400866779,-509269521,-580602375,405956020,-855173313,258091129,909162200,-315251598,-236890006,-531780379,342955474,-65890269,-111521851,-139906773,34939329,927781348,300458386,-603518159,341287362,-234266006,634183737,454833275,79631354,-954691672,102295826,688738167,-958428411,-293858940,480440548,590037773,-365477625,-425165732,170388756,164258145,-507355122,44132561,982798160,-101120201,-920959602,-239250887,534862084,-834736952,-123162323,389682556,656996523,864481760,381156936,129520066,-995551618,106129054,-471580461,856850511,653020333,531769579,-190375506,-992983956,73867968,-931909584,403329114,-945055546,627782991,-6
66011011,214665550,505169020,210703185,-591690068,11218620,790987020,561646751,-33552011,-407054835,-850936697,-838201457,-878394038,-759131062,-857347819,531582062,941614352,-743754869,650338718,178603580,-834368178,-976933957,138667533,746471721,551579035,-173400777,-1191455,320121832,-756997945,402594806,934711944,970489131,-193223639,276816990,842959026,-799673669,-367385466,681433973,468892554,-455199860,393993101,905435993,218314965,284795080,913357885,-652530417,743455659,869345718,808902357,829820413,7206928,544900359,225903242,-507688526,750219353,-663810717,-643969173,-269151675,348252329,-144351998,693995296,-692546103,869432378,650161259,568234384,710782517,179157604,-446849233,-922615096,-61183498,30945194,819052356,467911324,119876349,46908453,-420671619,344944591,889080726,-619477633,174882730,553799129,-941691933,146036558,-116064711,222282163,-272996845,-147041859,-381977096,-786757040,229096334,712541239,326039628,-952490563,-362214129,-680530864,421358212,-472290821,-331398150,-42297937,-393141325,-467541333,655524006,452908624,-626562356,-758303565,338224482,312047704,599445442,-328430584,259549134,838272865,-755896597,-151000710,607787908,11870257,-680877184,528161590,769242561,-447486537,-127579653,135915595,-271181270,12536315,693445551,900639800,-692327759,-671179999,977783490,935798407,659688020,-478438023,-852131846,-900332354,-71029072,888095095,924175448,430392829,391195112,399460998,-173259008,-168543477,-495967896,-697314804,591126097,301126906,946273416,-772817341,-996445410,466876435,-92937212,-226599286,43831927,-588596503,-55759661,212885530,-805455693,572269060,415773175,-320900489,-651775079,5276363,91615150,-882588415,502210147,-401039810,26713405,-723806893,125439289,472777644,869504248,967552969,-268043646,-146710780,-511973692,-803204681,-146827180,-453201623,-878534466,631307563,507752930,-63646026,-348120807,222898965,-410732708,617953050,-478244422,877782569,-507956686,-196516478,-477074335,329039585,-480651334,-890030740,4
61391919,-977815738,-943937849,321402466,-588396975,-945139052,871313567,-484830305,365305963,891985414,466048577,880607400,-245705654,359506342,-612177301,840415132,693541406,707348310,971762025,-871678269,897143169,625100531,743908163,-315815019,-63211252,-962051459,510469141,566817231,-186207711,309838979,101194721,-127111899,-109107404,-702499174,918781433,34041307,927374088,-67369303,-680339659,202481166,-218771120,329951816,-280782626,-423403505,619779171,-567310903,-660420942,756801677,996208091,822990010,940351540,1331227,382201579,891956260,-894584436,346600029,805733487,-691767750,859030444,1]
print(Solution().minimumOperations(nums, 938, 80))
|
flexible
|
{
"blob_id": "50b2b9d1edc8eaa44050e2b3b2375e966f16e10c",
"index": 6997,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n\n def minimumOperations(self, nums: List[int], start: int, goal: int) ->int:\n que = deque([(start, 0)])\n visited = set()\n while que:\n x, steps = que.popleft()\n for i in nums:\n for t in [x + i, x - i, x ^ i]:\n if t == goal:\n return steps + 1\n if 0 <= t <= 1000 and t not in visited:\n visited.add(t)\n que.append((t, steps + 1))\n return -1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def minimumOperations(self, nums: List[int], start: int, goal: int) ->int:\n que = deque([(start, 0)])\n visited = set()\n while que:\n x, steps = que.popleft()\n for i in nums:\n for t in [x + i, x - i, x ^ i]:\n if t == goal:\n return steps + 1\n if 0 <= t <= 1000 and t not in visited:\n visited.add(t)\n que.append((t, steps + 1))\n return -1\n\n\nif __name__ == '__main__':\n print(Solution().minimumOperations(nums=[2, 4, 12], start=2, goal=12))\n print(Solution().minimumOperations(nums=[3, 5, 7], start=0, goal=-4))\n print(Solution().minimumOperations(nums=[2, 8, 16], start=0, goal=1))\n nums = [-574083075, -393928592, -508025046, 942818778, 355796909, \n 515245901, 40297943, 106087952, 112856312, -516143616, 363801856, \n 431681353, 726373078, 947630603, 357311001, 594181298, -797268217, \n -741740009, 310972287, 588107527, -535699426, 56324906, -77958073, \n 739798122, -839472160, 439902753, -599749231, -378067373, -\n 466272504, -668036170, 404827976, 805486978, -762507067, 726001618,\n -761047930, 574054980, 365793614, 112020312, 612806855, -256862366,\n 174046424, 646109365, 263765015, 952305939, 864217737, -236873371, \n -991807014, 365730786, -908194963, -778205177, -949314048, -\n 636570500, -883257881, 316313456, -846577965, 132287864, -143230736,\n 425542510, -99852882, -845180792, -329895545, 402782707, -52191127,\n -470380017, -788836785, -655887976, -899430590, 481923982, 45348738,\n -595401481, -470990760, -417390352, -570278840, -873871723, -\n 905595403, 276201114, -733014032, 126018863, 452235438, -512574658,\n -172220362, 845468743, -743189114, 597319839, -584451932, 410604481,\n -508885990, -670396751, -765996786, 345814977, -920014372, -\n 826696704, 640912714, 119494504, 745808962, -503060001, -677959595,\n -831428592, 282855843, 150678167, -467803553, -503929808, 636431692,\n -235369757, -964826080, 93942566, -65314422, -385277528, -379647659,\n 601981747, -724269861, -516713072, -487487495, 
655771565, 406499531,\n -943540581, -290169291, 438686645, -227355533, -822612523, \n 218329747, -800810927, -944724740, -978181517, 274815523, 296317841,\n 56043572, -712672386, -374759873, 86973233, -246165119, 73819230, -\n 801140338, 414767806, 883318746, -822063159, -705772942, -674915800,\n 710520717, -97115365, 599549847, 115344568, 53002314, 242487774, -\n 665998906, -986068895, -844909606, -515222297, -500827406, \n 317865850, -50395059, 522417393, 51184184, 241544846, -996297136, -\n 227251827, 924359619, 822815774, 149467545, 523511343, 252991991, \n 450254984, -393459583, 617410075, 197030479, -234418418, -256650708,\n 872334551, 779068346, 216294504, -708680875, -171498970, -970211466,\n -176493993, 729939373, -658054782, -342680218, 75508900, -377139149,\n 392008859, 121412250, -163586626, -468148273, 624248706, 50004864, \n -862378428, -849927586, 33598413, -157654824, -229712613, 149116317,\n 183820138, 378717707, -995563605, 777654910, 511275580, -157964872,\n -718605034, -764316227, -225837302, -166208500, -587688677, \n 78982205, -488693575, 667205793, 419165994, 731543316, 97551954, -\n 387317666, -580873271, 533504431, -31624036, -356035140, -849089082,\n -767376392, -625237600, 940717947, -337709497, 915255567, 727274007,\n -879463448, -363148174, -854892492, 110472344, -466194659, -\n 146843198, -454944217, -365338018, -349424052, 994474446, -\n 554968068, -883734951, -697723265, 583756420, -5696410, -413731452,\n -278706136, -399245668, 83345207, -227231270, 618384545, 846514423,\n -556667092, 590460194, -686116067, -509669269, -510065093, 77094171,\n 270317951, 166095128, -918526061, -766370855, -20861321, 478791777,\n 663673443, -152055285, 224745414, 123998803, 66824877, -85117337, \n 212126175, -718523523, 615359230, -212148589, 620733736, -81197397,\n 51814471, 709312024, 562145805, -770811828, 321230393, -611636320, \n -421337549, -804527290, -416739656, -886764000, 170695026, \n 414273830, -449987380, -56782953, 772039002, -961265403, 
-896009751,\n -524231358, 497253209, -507048459, -308522246, -508249054, -\n 53240581, -241704483, -974133571, 232897679, -152365934, -861310248,\n -305766289, 340680726, 844612779, -180227470, 40798478, 729446447, \n 395975250, -142447074, -606021375, 47555730, 294446347, 452346091, \n -409427076, -845574381, -838995437, 45787728, 714700474, -315824001,\n 694717388, 502723269, 119244099, -538412679, -207297135, -189078560,\n -812610469, -350061253, -73975237, -119323509, 791863263, 741180208,\n 740488891, -475394166, -191585617, -441527154, 767292531, 201222965,\n -150196525, 588513813, 245328283, 396662663, 100705864, 126789247, \n 487161165, -460512081, -469521559, -998848254, -917609155, \n 314537168, 418002454, -926920818, -628671538, 179971032, -105401559,\n 449618919, 823404672, 178494651, -773108884, 10686795, -506642993, \n -60172121, -510142552, 651623281, -163851428, 158562600, -782456228,\n -336697076, -571952851, 849878818, -456510759, -65997243, -\n 506043404, -558981572, 186946604, 124948039, 954065944, 707437320, \n -224056616, -319237038, 512138196, 742466011, -49725596, -784781640,\n -753413026, -331602365, -246166733, -658650959, -4888181, -\n 547553549, 786689548, -866846384, -212028209, -98029403, -325422497,\n -409855095, 320083382, -491251215, -471713326, 890922019, -\n 766590943, -481641953, -227197451, -709166930, -965945544, \n 407688175, -78385698, -372800469, 389036825, 79885300, -858488452, \n -390177477, 233839191, -518116358, 420408256, 872470025, 241770824,\n -106901417, -328631191, 548580365, -88408815, -647601013, 658880218,\n -870455388, 277154380, 370022702, -381519264, -800726224, 183685380,\n 208169777, 925905330, 732494840, 251754641, -681988029, 593628349, \n 153852085, 353590607, 242118102, -788094641, -242801844, 474214244,\n 579450364, 580046580, -269927114, 249739292, 295331955, -544556236,\n -814569172, 808895922, 707421114, 305101587, 621173158, -248896453,\n 988552702, -375313331, -87289858, -796466539, -529411285, -\n 
197315984, 33984203, -122839651, -90735568, 277265491, 762059774, -\n 628018119, -406508643, -856856769, 364613737, 59319066, 614382155, \n -614620718, -133957131, -394985422, -29943491, 154443077, -72727846,\n 392096990, 562681453, 364248049, -156700958, 717335155, -343408748,\n 77301840, -155372684, -432114609, 414752267, -485732822, 876096548,\n 842614035, -614245110, -872219121, 291509502, 334817026, 214330487,\n 405297459, -449582485, 789314834, 936409758, 452350380, -146649749,\n 898255045, 116506422, 671728835, 280507922, -189039799, -565803074,\n -439924663, -14345985, -98428526, 57303809, 424685389, -84977856, -\n 9251973, 998935249, 229402894, -405424548, 448394272, 182149207, -\n 728030940, 347577568, 567511928, -27655302, 400866779, -509269521, \n -580602375, 405956020, -855173313, 258091129, 909162200, -315251598,\n -236890006, -531780379, 342955474, -65890269, -111521851, -\n 139906773, 34939329, 927781348, 300458386, -603518159, 341287362, -\n 234266006, 634183737, 454833275, 79631354, -954691672, 102295826, \n 688738167, -958428411, -293858940, 480440548, 590037773, -365477625,\n -425165732, 170388756, 164258145, -507355122, 44132561, 982798160, \n -101120201, -920959602, -239250887, 534862084, -834736952, -\n 123162323, 389682556, 656996523, 864481760, 381156936, 129520066, -\n 995551618, 106129054, -471580461, 856850511, 653020333, 531769579, \n -190375506, -992983956, 73867968, -931909584, 403329114, -945055546,\n 627782991, -666011011, 214665550, 505169020, 210703185, -591690068,\n 11218620, 790987020, 561646751, -33552011, -407054835, -850936697, \n -838201457, -878394038, -759131062, -857347819, 531582062, \n 941614352, -743754869, 650338718, 178603580, -834368178, -976933957,\n 138667533, 746471721, 551579035, -173400777, -1191455, 320121832, -\n 756997945, 402594806, 934711944, 970489131, -193223639, 276816990, \n 842959026, -799673669, -367385466, 681433973, 468892554, -455199860,\n 393993101, 905435993, 218314965, 284795080, 913357885, 
-652530417, \n 743455659, 869345718, 808902357, 829820413, 7206928, 544900359, \n 225903242, -507688526, 750219353, -663810717, -643969173, -\n 269151675, 348252329, -144351998, 693995296, -692546103, 869432378,\n 650161259, 568234384, 710782517, 179157604, -446849233, -922615096,\n -61183498, 30945194, 819052356, 467911324, 119876349, 46908453, -\n 420671619, 344944591, 889080726, -619477633, 174882730, 553799129, \n -941691933, 146036558, -116064711, 222282163, -272996845, -\n 147041859, -381977096, -786757040, 229096334, 712541239, 326039628,\n -952490563, -362214129, -680530864, 421358212, -472290821, -\n 331398150, -42297937, -393141325, -467541333, 655524006, 452908624,\n -626562356, -758303565, 338224482, 312047704, 599445442, -328430584,\n 259549134, 838272865, -755896597, -151000710, 607787908, 11870257, \n -680877184, 528161590, 769242561, -447486537, -127579653, 135915595,\n -271181270, 12536315, 693445551, 900639800, -692327759, -671179999,\n 977783490, 935798407, 659688020, -478438023, -852131846, -900332354,\n -71029072, 888095095, 924175448, 430392829, 391195112, 399460998, -\n 173259008, -168543477, -495967896, -697314804, 591126097, 301126906,\n 946273416, -772817341, -996445410, 466876435, -92937212, -226599286,\n 43831927, -588596503, -55759661, 212885530, -805455693, 572269060, \n 415773175, -320900489, -651775079, 5276363, 91615150, -882588415, \n 502210147, -401039810, 26713405, -723806893, 125439289, 472777644, \n 869504248, 967552969, -268043646, -146710780, -511973692, -\n 803204681, -146827180, -453201623, -878534466, 631307563, 507752930,\n -63646026, -348120807, 222898965, -410732708, 617953050, -478244422,\n 877782569, -507956686, -196516478, -477074335, 329039585, -\n 480651334, -890030740, 461391919, -977815738, -943937849, 321402466,\n -588396975, -945139052, 871313567, -484830305, 365305963, 891985414,\n 466048577, 880607400, -245705654, 359506342, -612177301, 840415132,\n 693541406, 707348310, 971762025, -871678269, 897143169, 
625100531, \n 743908163, -315815019, -63211252, -962051459, 510469141, 566817231,\n -186207711, 309838979, 101194721, -127111899, -109107404, -\n 702499174, 918781433, 34041307, 927374088, -67369303, -680339659, \n 202481166, -218771120, 329951816, -280782626, -423403505, 619779171,\n -567310903, -660420942, 756801677, 996208091, 822990010, 940351540,\n 1331227, 382201579, 891956260, -894584436, 346600029, 805733487, -\n 691767750, 859030444, 1]\n print(Solution().minimumOperations(nums, 938, 80))\n",
"step-4": "<mask token>\nfrom typing import List\nfrom collections import deque\n\n\nclass Solution:\n\n def minimumOperations(self, nums: List[int], start: int, goal: int) ->int:\n que = deque([(start, 0)])\n visited = set()\n while que:\n x, steps = que.popleft()\n for i in nums:\n for t in [x + i, x - i, x ^ i]:\n if t == goal:\n return steps + 1\n if 0 <= t <= 1000 and t not in visited:\n visited.add(t)\n que.append((t, steps + 1))\n return -1\n\n\nif __name__ == '__main__':\n print(Solution().minimumOperations(nums=[2, 4, 12], start=2, goal=12))\n print(Solution().minimumOperations(nums=[3, 5, 7], start=0, goal=-4))\n print(Solution().minimumOperations(nums=[2, 8, 16], start=0, goal=1))\n nums = [-574083075, -393928592, -508025046, 942818778, 355796909, \n 515245901, 40297943, 106087952, 112856312, -516143616, 363801856, \n 431681353, 726373078, 947630603, 357311001, 594181298, -797268217, \n -741740009, 310972287, 588107527, -535699426, 56324906, -77958073, \n 739798122, -839472160, 439902753, -599749231, -378067373, -\n 466272504, -668036170, 404827976, 805486978, -762507067, 726001618,\n -761047930, 574054980, 365793614, 112020312, 612806855, -256862366,\n 174046424, 646109365, 263765015, 952305939, 864217737, -236873371, \n -991807014, 365730786, -908194963, -778205177, -949314048, -\n 636570500, -883257881, 316313456, -846577965, 132287864, -143230736,\n 425542510, -99852882, -845180792, -329895545, 402782707, -52191127,\n -470380017, -788836785, -655887976, -899430590, 481923982, 45348738,\n -595401481, -470990760, -417390352, -570278840, -873871723, -\n 905595403, 276201114, -733014032, 126018863, 452235438, -512574658,\n -172220362, 845468743, -743189114, 597319839, -584451932, 410604481,\n -508885990, -670396751, -765996786, 345814977, -920014372, -\n 826696704, 640912714, 119494504, 745808962, -503060001, -677959595,\n -831428592, 282855843, 150678167, -467803553, -503929808, 636431692,\n -235369757, -964826080, 93942566, -65314422, -385277528, 
-379647659,\n 601981747, -724269861, -516713072, -487487495, 655771565, 406499531,\n -943540581, -290169291, 438686645, -227355533, -822612523, \n 218329747, -800810927, -944724740, -978181517, 274815523, 296317841,\n 56043572, -712672386, -374759873, 86973233, -246165119, 73819230, -\n 801140338, 414767806, 883318746, -822063159, -705772942, -674915800,\n 710520717, -97115365, 599549847, 115344568, 53002314, 242487774, -\n 665998906, -986068895, -844909606, -515222297, -500827406, \n 317865850, -50395059, 522417393, 51184184, 241544846, -996297136, -\n 227251827, 924359619, 822815774, 149467545, 523511343, 252991991, \n 450254984, -393459583, 617410075, 197030479, -234418418, -256650708,\n 872334551, 779068346, 216294504, -708680875, -171498970, -970211466,\n -176493993, 729939373, -658054782, -342680218, 75508900, -377139149,\n 392008859, 121412250, -163586626, -468148273, 624248706, 50004864, \n -862378428, -849927586, 33598413, -157654824, -229712613, 149116317,\n 183820138, 378717707, -995563605, 777654910, 511275580, -157964872,\n -718605034, -764316227, -225837302, -166208500, -587688677, \n 78982205, -488693575, 667205793, 419165994, 731543316, 97551954, -\n 387317666, -580873271, 533504431, -31624036, -356035140, -849089082,\n -767376392, -625237600, 940717947, -337709497, 915255567, 727274007,\n -879463448, -363148174, -854892492, 110472344, -466194659, -\n 146843198, -454944217, -365338018, -349424052, 994474446, -\n 554968068, -883734951, -697723265, 583756420, -5696410, -413731452,\n -278706136, -399245668, 83345207, -227231270, 618384545, 846514423,\n -556667092, 590460194, -686116067, -509669269, -510065093, 77094171,\n 270317951, 166095128, -918526061, -766370855, -20861321, 478791777,\n 663673443, -152055285, 224745414, 123998803, 66824877, -85117337, \n 212126175, -718523523, 615359230, -212148589, 620733736, -81197397,\n 51814471, 709312024, 562145805, -770811828, 321230393, -611636320, \n -421337549, -804527290, -416739656, -886764000, 
170695026, \n 414273830, -449987380, -56782953, 772039002, -961265403, -896009751,\n -524231358, 497253209, -507048459, -308522246, -508249054, -\n 53240581, -241704483, -974133571, 232897679, -152365934, -861310248,\n -305766289, 340680726, 844612779, -180227470, 40798478, 729446447, \n 395975250, -142447074, -606021375, 47555730, 294446347, 452346091, \n -409427076, -845574381, -838995437, 45787728, 714700474, -315824001,\n 694717388, 502723269, 119244099, -538412679, -207297135, -189078560,\n -812610469, -350061253, -73975237, -119323509, 791863263, 741180208,\n 740488891, -475394166, -191585617, -441527154, 767292531, 201222965,\n -150196525, 588513813, 245328283, 396662663, 100705864, 126789247, \n 487161165, -460512081, -469521559, -998848254, -917609155, \n 314537168, 418002454, -926920818, -628671538, 179971032, -105401559,\n 449618919, 823404672, 178494651, -773108884, 10686795, -506642993, \n -60172121, -510142552, 651623281, -163851428, 158562600, -782456228,\n -336697076, -571952851, 849878818, -456510759, -65997243, -\n 506043404, -558981572, 186946604, 124948039, 954065944, 707437320, \n -224056616, -319237038, 512138196, 742466011, -49725596, -784781640,\n -753413026, -331602365, -246166733, -658650959, -4888181, -\n 547553549, 786689548, -866846384, -212028209, -98029403, -325422497,\n -409855095, 320083382, -491251215, -471713326, 890922019, -\n 766590943, -481641953, -227197451, -709166930, -965945544, \n 407688175, -78385698, -372800469, 389036825, 79885300, -858488452, \n -390177477, 233839191, -518116358, 420408256, 872470025, 241770824,\n -106901417, -328631191, 548580365, -88408815, -647601013, 658880218,\n -870455388, 277154380, 370022702, -381519264, -800726224, 183685380,\n 208169777, 925905330, 732494840, 251754641, -681988029, 593628349, \n 153852085, 353590607, 242118102, -788094641, -242801844, 474214244,\n 579450364, 580046580, -269927114, 249739292, 295331955, -544556236,\n -814569172, 808895922, 707421114, 305101587, 621173158, 
-248896453,\n 988552702, -375313331, -87289858, -796466539, -529411285, -\n 197315984, 33984203, -122839651, -90735568, 277265491, 762059774, -\n 628018119, -406508643, -856856769, 364613737, 59319066, 614382155, \n -614620718, -133957131, -394985422, -29943491, 154443077, -72727846,\n 392096990, 562681453, 364248049, -156700958, 717335155, -343408748,\n 77301840, -155372684, -432114609, 414752267, -485732822, 876096548,\n 842614035, -614245110, -872219121, 291509502, 334817026, 214330487,\n 405297459, -449582485, 789314834, 936409758, 452350380, -146649749,\n 898255045, 116506422, 671728835, 280507922, -189039799, -565803074,\n -439924663, -14345985, -98428526, 57303809, 424685389, -84977856, -\n 9251973, 998935249, 229402894, -405424548, 448394272, 182149207, -\n 728030940, 347577568, 567511928, -27655302, 400866779, -509269521, \n -580602375, 405956020, -855173313, 258091129, 909162200, -315251598,\n -236890006, -531780379, 342955474, -65890269, -111521851, -\n 139906773, 34939329, 927781348, 300458386, -603518159, 341287362, -\n 234266006, 634183737, 454833275, 79631354, -954691672, 102295826, \n 688738167, -958428411, -293858940, 480440548, 590037773, -365477625,\n -425165732, 170388756, 164258145, -507355122, 44132561, 982798160, \n -101120201, -920959602, -239250887, 534862084, -834736952, -\n 123162323, 389682556, 656996523, 864481760, 381156936, 129520066, -\n 995551618, 106129054, -471580461, 856850511, 653020333, 531769579, \n -190375506, -992983956, 73867968, -931909584, 403329114, -945055546,\n 627782991, -666011011, 214665550, 505169020, 210703185, -591690068,\n 11218620, 790987020, 561646751, -33552011, -407054835, -850936697, \n -838201457, -878394038, -759131062, -857347819, 531582062, \n 941614352, -743754869, 650338718, 178603580, -834368178, -976933957,\n 138667533, 746471721, 551579035, -173400777, -1191455, 320121832, -\n 756997945, 402594806, 934711944, 970489131, -193223639, 276816990, \n 842959026, -799673669, -367385466, 681433973, 
468892554, -455199860,\n 393993101, 905435993, 218314965, 284795080, 913357885, -652530417, \n 743455659, 869345718, 808902357, 829820413, 7206928, 544900359, \n 225903242, -507688526, 750219353, -663810717, -643969173, -\n 269151675, 348252329, -144351998, 693995296, -692546103, 869432378,\n 650161259, 568234384, 710782517, 179157604, -446849233, -922615096,\n -61183498, 30945194, 819052356, 467911324, 119876349, 46908453, -\n 420671619, 344944591, 889080726, -619477633, 174882730, 553799129, \n -941691933, 146036558, -116064711, 222282163, -272996845, -\n 147041859, -381977096, -786757040, 229096334, 712541239, 326039628,\n -952490563, -362214129, -680530864, 421358212, -472290821, -\n 331398150, -42297937, -393141325, -467541333, 655524006, 452908624,\n -626562356, -758303565, 338224482, 312047704, 599445442, -328430584,\n 259549134, 838272865, -755896597, -151000710, 607787908, 11870257, \n -680877184, 528161590, 769242561, -447486537, -127579653, 135915595,\n -271181270, 12536315, 693445551, 900639800, -692327759, -671179999,\n 977783490, 935798407, 659688020, -478438023, -852131846, -900332354,\n -71029072, 888095095, 924175448, 430392829, 391195112, 399460998, -\n 173259008, -168543477, -495967896, -697314804, 591126097, 301126906,\n 946273416, -772817341, -996445410, 466876435, -92937212, -226599286,\n 43831927, -588596503, -55759661, 212885530, -805455693, 572269060, \n 415773175, -320900489, -651775079, 5276363, 91615150, -882588415, \n 502210147, -401039810, 26713405, -723806893, 125439289, 472777644, \n 869504248, 967552969, -268043646, -146710780, -511973692, -\n 803204681, -146827180, -453201623, -878534466, 631307563, 507752930,\n -63646026, -348120807, 222898965, -410732708, 617953050, -478244422,\n 877782569, -507956686, -196516478, -477074335, 329039585, -\n 480651334, -890030740, 461391919, -977815738, -943937849, 321402466,\n -588396975, -945139052, 871313567, -484830305, 365305963, 891985414,\n 466048577, 880607400, -245705654, 359506342, 
-612177301, 840415132,\n 693541406, 707348310, 971762025, -871678269, 897143169, 625100531, \n 743908163, -315815019, -63211252, -962051459, 510469141, 566817231,\n -186207711, 309838979, 101194721, -127111899, -109107404, -\n 702499174, 918781433, 34041307, 927374088, -67369303, -680339659, \n 202481166, -218771120, 329951816, -280782626, -423403505, 619779171,\n -567310903, -660420942, 756801677, 996208091, 822990010, 940351540,\n 1331227, 382201579, 891956260, -894584436, 346600029, 805733487, -\n 691767750, 859030444, 1]\n print(Solution().minimumOperations(nums, 938, 80))\n",
"step-5": "'''\n-Medium-\n*BFS*\n\nYou are given a 0-indexed integer array nums containing distinct numbers, an integer start, and an integer goal. There is an integer x that is initially set to start, and you want to perform operations on x such that it is converted to goal. You can perform the following operation repeatedly on the number x:\n\nIf 0 <= x <= 1000, then for any index i in the array (0 <= i < nums.length), you can set x to any of the following:\n\nx + nums[i]\nx - nums[i]\nx ^ nums[i] (bitwise-XOR)\nNote that you can use each nums[i] any number of times in any order. Operations that set x to be out of the range 0 <= x <= 1000 are valid, but no more operations can be done afterward.\n\nReturn the minimum number of operations needed to convert x = start into goal, and -1 if it is not possible.\n\n \n\nExample 1:\n\nInput: nums = [2,4,12], start = 2, goal = 12\nOutput: 2\nExplanation: We can go from 2 → 14 → 12 with the following 2 operations.\n- 2 + 12 = 14\n- 14 - 2 = 12\nExample 2:\n\nInput: nums = [3,5,7], start = 0, goal = -4\nOutput: 2\nExplanation: We can go from 0 → 3 → -4 with the following 2 operations. 
\n- 0 + 3 = 3\n- 3 - 7 = -4\nNote that the last operation sets x out of the range 0 <= x <= 1000, which is valid.\nExample 3:\n\nInput: nums = [2,8,16], start = 0, goal = 1\nOutput: -1\nExplanation: There is no way to convert 0 into 1.\n \n\nConstraints:\n\n1 <= nums.length <= 1000\n-109 <= nums[i], goal <= 109\n0 <= start <= 1000\nstart != goal\nAll the integers in nums are distinct.\n\n\n'''\n\nfrom typing import List\nfrom collections import deque\n\nclass Solution:\n def minimumOperations(self, nums: List[int], start: int, goal: int) -> int:\n \n que = deque([(start,0)]) \n visited = set() \n while que:\n x, steps = que.popleft() \n for i in nums:\n for t in [x+i, x-i, x^i]:\n if t == goal: return steps + 1\n if 0 <= t <= 1000 and t not in visited:\n visited.add(t)\n que.append((t, steps+1))\n return -1\n\n \n\n\n\n \n\n\nif __name__ == \"__main__\":\n print(Solution().minimumOperations(nums = [2,4,12], start = 2, goal = 12))\n print(Solution().minimumOperations(nums = [3,5,7], start = 0, goal = -4))\n print(Solution().minimumOperations(nums = [2,8,16], start = 0, goal = 1))\n nums = 
[-574083075,-393928592,-508025046,942818778,355796909,515245901,40297943,106087952,112856312,-516143616,363801856,431681353,726373078,947630603,357311001,594181298,-797268217,-741740009,310972287,588107527,-535699426,56324906,-77958073,739798122,-839472160,439902753,-599749231,-378067373,-466272504,-668036170,404827976,805486978,-762507067,726001618,-761047930,574054980,365793614,112020312,612806855,-256862366,174046424,646109365,263765015,952305939,864217737,-236873371,-991807014,365730786,-908194963,-778205177,-949314048,-636570500,-883257881,316313456,-846577965,132287864,-143230736,425542510,-99852882,-845180792,-329895545,402782707,-52191127,-470380017,-788836785,-655887976,-899430590,481923982,45348738,-595401481,-470990760,-417390352,-570278840,-873871723,-905595403,276201114,-733014032,126018863,452235438,-512574658,-172220362,845468743,-743189114,597319839,-584451932,410604481,-508885990,-670396751,-765996786,345814977,-920014372,-826696704,640912714,119494504,745808962,-503060001,-677959595,-831428592,282855843,150678167,-467803553,-503929808,636431692,-235369757,-964826080,93942566,-65314422,-385277528,-379647659,601981747,-724269861,-516713072,-487487495,655771565,406499531,-943540581,-290169291,438686645,-227355533,-822612523,218329747,-800810927,-944724740,-978181517,274815523,296317841,56043572,-712672386,-374759873,86973233,-246165119,73819230,-801140338,414767806,883318746,-822063159,-705772942,-674915800,710520717,-97115365,599549847,115344568,53002314,242487774,-665998906,-986068895,-844909606,-515222297,-500827406,317865850,-50395059,522417393,51184184,241544846,-996297136,-227251827,924359619,822815774,149467545,523511343,252991991,450254984,-393459583,617410075,197030479,-234418418,-256650708,872334551,779068346,216294504,-708680875,-171498970,-970211466,-176493993,729939373,-658054782,-342680218,75508900,-377139149,392008859,121412250,-163586626,-468148273,624248706,50004864,-862378428,-849927586,33598413,-157654824,-229712613,149116317,183820
138,378717707,-995563605,777654910,511275580,-157964872,-718605034,-764316227,-225837302,-166208500,-587688677,78982205,-488693575,667205793,419165994,731543316,97551954,-387317666,-580873271,533504431,-31624036,-356035140,-849089082,-767376392,-625237600,940717947,-337709497,915255567,727274007,-879463448,-363148174,-854892492,110472344,-466194659,-146843198,-454944217,-365338018,-349424052,994474446,-554968068,-883734951,-697723265,583756420,-5696410,-413731452,-278706136,-399245668,83345207,-227231270,618384545,846514423,-556667092,590460194,-686116067,-509669269,-510065093,77094171,270317951,166095128,-918526061,-766370855,-20861321,478791777,663673443,-152055285,224745414,123998803,66824877,-85117337,212126175,-718523523,615359230,-212148589,620733736,-81197397,51814471,709312024,562145805,-770811828,321230393,-611636320,-421337549,-804527290,-416739656,-886764000,170695026,414273830,-449987380,-56782953,772039002,-961265403,-896009751,-524231358,497253209,-507048459,-308522246,-508249054,-53240581,-241704483,-974133571,232897679,-152365934,-861310248,-305766289,340680726,844612779,-180227470,40798478,729446447,395975250,-142447074,-606021375,47555730,294446347,452346091,-409427076,-845574381,-838995437,45787728,714700474,-315824001,694717388,502723269,119244099,-538412679,-207297135,-189078560,-812610469,-350061253,-73975237,-119323509,791863263,741180208,740488891,-475394166,-191585617,-441527154,767292531,201222965,-150196525,588513813,245328283,396662663,100705864,126789247,487161165,-460512081,-469521559,-998848254,-917609155,314537168,418002454,-926920818,-628671538,179971032,-105401559,449618919,823404672,178494651,-773108884,10686795,-506642993,-60172121,-510142552,651623281,-163851428,158562600,-782456228,-336697076,-571952851,849878818,-456510759,-65997243,-506043404,-558981572,186946604,124948039,954065944,707437320,-224056616,-319237038,512138196,742466011,-49725596,-784781640,-753413026,-331602365,-246166733,-658650959,-4888181,-547553549,786689548
,-866846384,-212028209,-98029403,-325422497,-409855095,320083382,-491251215,-471713326,890922019,-766590943,-481641953,-227197451,-709166930,-965945544,407688175,-78385698,-372800469,389036825,79885300,-858488452,-390177477,233839191,-518116358,420408256,872470025,241770824,-106901417,-328631191,548580365,-88408815,-647601013,658880218,-870455388,277154380,370022702,-381519264,-800726224,183685380,208169777,925905330,732494840,251754641,-681988029,593628349,153852085,353590607,242118102,-788094641,-242801844,474214244,579450364,580046580,-269927114,249739292,295331955,-544556236,-814569172,808895922,707421114,305101587,621173158,-248896453,988552702,-375313331,-87289858,-796466539,-529411285,-197315984,33984203,-122839651,-90735568,277265491,762059774,-628018119,-406508643,-856856769,364613737,59319066,614382155,-614620718,-133957131,-394985422,-29943491,154443077,-72727846,392096990,562681453,364248049,-156700958,717335155,-343408748,77301840,-155372684,-432114609,414752267,-485732822,876096548,842614035,-614245110,-872219121,291509502,334817026,214330487,405297459,-449582485,789314834,936409758,452350380,-146649749,898255045,116506422,671728835,280507922,-189039799,-565803074,-439924663,-14345985,-98428526,57303809,424685389,-84977856,-9251973,998935249,229402894,-405424548,448394272,182149207,-728030940,347577568,567511928,-27655302,400866779,-509269521,-580602375,405956020,-855173313,258091129,909162200,-315251598,-236890006,-531780379,342955474,-65890269,-111521851,-139906773,34939329,927781348,300458386,-603518159,341287362,-234266006,634183737,454833275,79631354,-954691672,102295826,688738167,-958428411,-293858940,480440548,590037773,-365477625,-425165732,170388756,164258145,-507355122,44132561,982798160,-101120201,-920959602,-239250887,534862084,-834736952,-123162323,389682556,656996523,864481760,381156936,129520066,-995551618,106129054,-471580461,856850511,653020333,531769579,-190375506,-992983956,73867968,-931909584,403329114,-945055546,627782991,-66601101
1,214665550,505169020,210703185,-591690068,11218620,790987020,561646751,-33552011,-407054835,-850936697,-838201457,-878394038,-759131062,-857347819,531582062,941614352,-743754869,650338718,178603580,-834368178,-976933957,138667533,746471721,551579035,-173400777,-1191455,320121832,-756997945,402594806,934711944,970489131,-193223639,276816990,842959026,-799673669,-367385466,681433973,468892554,-455199860,393993101,905435993,218314965,284795080,913357885,-652530417,743455659,869345718,808902357,829820413,7206928,544900359,225903242,-507688526,750219353,-663810717,-643969173,-269151675,348252329,-144351998,693995296,-692546103,869432378,650161259,568234384,710782517,179157604,-446849233,-922615096,-61183498,30945194,819052356,467911324,119876349,46908453,-420671619,344944591,889080726,-619477633,174882730,553799129,-941691933,146036558,-116064711,222282163,-272996845,-147041859,-381977096,-786757040,229096334,712541239,326039628,-952490563,-362214129,-680530864,421358212,-472290821,-331398150,-42297937,-393141325,-467541333,655524006,452908624,-626562356,-758303565,338224482,312047704,599445442,-328430584,259549134,838272865,-755896597,-151000710,607787908,11870257,-680877184,528161590,769242561,-447486537,-127579653,135915595,-271181270,12536315,693445551,900639800,-692327759,-671179999,977783490,935798407,659688020,-478438023,-852131846,-900332354,-71029072,888095095,924175448,430392829,391195112,399460998,-173259008,-168543477,-495967896,-697314804,591126097,301126906,946273416,-772817341,-996445410,466876435,-92937212,-226599286,43831927,-588596503,-55759661,212885530,-805455693,572269060,415773175,-320900489,-651775079,5276363,91615150,-882588415,502210147,-401039810,26713405,-723806893,125439289,472777644,869504248,967552969,-268043646,-146710780,-511973692,-803204681,-146827180,-453201623,-878534466,631307563,507752930,-63646026,-348120807,222898965,-410732708,617953050,-478244422,877782569,-507956686,-196516478,-477074335,329039585,-480651334,-890030740,46139191
9,-977815738,-943937849,321402466,-588396975,-945139052,871313567,-484830305,365305963,891985414,466048577,880607400,-245705654,359506342,-612177301,840415132,693541406,707348310,971762025,-871678269,897143169,625100531,743908163,-315815019,-63211252,-962051459,510469141,566817231,-186207711,309838979,101194721,-127111899,-109107404,-702499174,918781433,34041307,927374088,-67369303,-680339659,202481166,-218771120,329951816,-280782626,-423403505,619779171,-567310903,-660420942,756801677,996208091,822990010,940351540,1331227,382201579,891956260,-894584436,346600029,805733487,-691767750,859030444,1]\n print(Solution().minimumOperations(nums, 938, 80))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class ModelSelection:
def __init__(self, user_data, movie_data, aggregated_data, train_data,
output_train):
self.train = train_data
self.users = user_data
self.aggregated = aggregated_data
self.movies = movie_data
self.output_train = output_train
def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range
):
kf = KFold(n_splits=nb_fold)
depth_n_errors = np.zeros((max_depth_range.__len__(), 2))
i = 0
for depth in max_depth_range:
depth_n_errors[i][0] = depth
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for depth in max_depth_range:
dt = DecisionTreeClassifier(max_depth=depth)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
depth_n_errors[i][1] += -scores.mean()
i += 1
i = 0
for depth in max_depth_range:
depth_n_errors[i][1] /= nb_fold
i += 1
best_depth = 0
best_error = 5
for depth, error in depth_n_errors:
if error < best_error:
best_error = error
best_depth = depth
dt = DecisionTreeClassifier(max_depth=best_depth)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_depth, final_error.mean()]
def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsClassifier(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsClassifier(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsRegressor(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsRegressor(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def make_submission(y_predict, user_id_test, movie_id_test, name=None, date
=True):
n_elements = len(y_predict)
if name is None:
name = 'submission'
if date:
name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))
with open(name + '.csv', 'w') as f:
f.write('"USER_ID_MOVIE_ID","PREDICTED_RATING"\n')
for i in range(n_elements):
if np.isnan(y_predict[i]):
raise ValueError('NaN detected!')
line = '{:0.0f},{:0.0f},{}\n'.format(user_id_test[i],
movie_id_test[i], y_predict[i])
f.write(line)
print('Submission file successfully written!')
class ModelSelection:
def __init__(self, user_data, movie_data, aggregated_data, train_data,
output_train):
self.train = train_data
self.users = user_data
self.aggregated = aggregated_data
self.movies = movie_data
self.output_train = output_train
def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range
):
kf = KFold(n_splits=nb_fold)
depth_n_errors = np.zeros((max_depth_range.__len__(), 2))
i = 0
for depth in max_depth_range:
depth_n_errors[i][0] = depth
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for depth in max_depth_range:
dt = DecisionTreeClassifier(max_depth=depth)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
depth_n_errors[i][1] += -scores.mean()
i += 1
i = 0
for depth in max_depth_range:
depth_n_errors[i][1] /= nb_fold
i += 1
best_depth = 0
best_error = 5
for depth, error in depth_n_errors:
if error < best_error:
best_error = error
best_depth = depth
dt = DecisionTreeClassifier(max_depth=best_depth)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_depth, final_error.mean()]
def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsClassifier(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsClassifier(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsRegressor(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsRegressor(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
<|reserved_special_token_0|>
print(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def make_submission(y_predict, user_id_test, movie_id_test, name=None, date
=True):
n_elements = len(y_predict)
if name is None:
name = 'submission'
if date:
name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))
with open(name + '.csv', 'w') as f:
f.write('"USER_ID_MOVIE_ID","PREDICTED_RATING"\n')
for i in range(n_elements):
if np.isnan(y_predict[i]):
raise ValueError('NaN detected!')
line = '{:0.0f},{:0.0f},{}\n'.format(user_id_test[i],
movie_id_test[i], y_predict[i])
f.write(line)
print('Submission file successfully written!')
class ModelSelection:
def __init__(self, user_data, movie_data, aggregated_data, train_data,
output_train):
self.train = train_data
self.users = user_data
self.aggregated = aggregated_data
self.movies = movie_data
self.output_train = output_train
def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range
):
kf = KFold(n_splits=nb_fold)
depth_n_errors = np.zeros((max_depth_range.__len__(), 2))
i = 0
for depth in max_depth_range:
depth_n_errors[i][0] = depth
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for depth in max_depth_range:
dt = DecisionTreeClassifier(max_depth=depth)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
depth_n_errors[i][1] += -scores.mean()
i += 1
i = 0
for depth in max_depth_range:
depth_n_errors[i][1] /= nb_fold
i += 1
best_depth = 0
best_error = 5
for depth, error in depth_n_errors:
if error < best_error:
best_error = error
best_depth = depth
dt = DecisionTreeClassifier(max_depth=best_depth)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_depth, final_error.mean()]
def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsClassifier(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsClassifier(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsRegressor(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsRegressor(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
users = pd.read_csv('data/user_data_normalized_28-11-2016_01h32.csv',
delimiter=',')
movies = pd.read_csv('data/movie_data_normalized.csv', delimiter=',')
train = pd.read_csv('data/data_train.csv', delimiter=',')
output = pd.read_csv('data/output_train.csv', delimiter=',')['rating']
aggregated = pd.read_csv('data/agregated_data_28-11-2016_01h50.csv',
delimiter=',')
ms = ModelSelection(users.values, movies.values, aggregated.values, train.
values, output)
print(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))
<|reserved_special_token_1|>
import numpy as np
import pandas as pd
import datetime
import time
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import KNeighborsRegressor
from sklearn.model_selection import cross_val_score
from sklearn import preprocessing
from sklearn.model_selection import KFold
def make_submission(y_predict, user_id_test, movie_id_test, name=None, date
=True):
n_elements = len(y_predict)
if name is None:
name = 'submission'
if date:
name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))
with open(name + '.csv', 'w') as f:
f.write('"USER_ID_MOVIE_ID","PREDICTED_RATING"\n')
for i in range(n_elements):
if np.isnan(y_predict[i]):
raise ValueError('NaN detected!')
line = '{:0.0f},{:0.0f},{}\n'.format(user_id_test[i],
movie_id_test[i], y_predict[i])
f.write(line)
print('Submission file successfully written!')
class ModelSelection:
def __init__(self, user_data, movie_data, aggregated_data, train_data,
output_train):
self.train = train_data
self.users = user_data
self.aggregated = aggregated_data
self.movies = movie_data
self.output_train = output_train
def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range
):
kf = KFold(n_splits=nb_fold)
depth_n_errors = np.zeros((max_depth_range.__len__(), 2))
i = 0
for depth in max_depth_range:
depth_n_errors[i][0] = depth
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for depth in max_depth_range:
dt = DecisionTreeClassifier(max_depth=depth)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
depth_n_errors[i][1] += -scores.mean()
i += 1
i = 0
for depth in max_depth_range:
depth_n_errors[i][1] /= nb_fold
i += 1
best_depth = 0
best_error = 5
for depth, error in depth_n_errors:
if error < best_error:
best_error = error
best_depth = depth
dt = DecisionTreeClassifier(max_depth=best_depth)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_depth, final_error.mean()]
def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsClassifier(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsClassifier(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):
kf = KFold(n_splits=nb_fold)
k_n_errors = np.zeros((k_range.__len__(), 2))
i = 0
for k in k_range:
k_n_errors[i][0] = k
i += 1
for train_index, test_index in kf.split(self.aggregated):
i = 0
for k in k_range:
dt = KNeighborsRegressor(n_neighbors=k)
scores = cross_val_score(dt, self.aggregated[train_index],
self.output_train[train_index], cv=nb_fold, scoring=
'neg_mean_squared_error')
k_n_errors[i][1] += -scores.mean()
i += 1
for i in range(k_range.__len__()):
k_n_errors[i][1] /= nb_fold
best_k = 0
best_error = 5
for k, error in k_n_errors:
if error < best_error:
best_error = error
best_k = k
dt = KNeighborsRegressor(n_neighbors=best_k)
final_error = -cross_val_score(dt, self.aggregated, self.
output_train, cv=nb_fold, scoring='neg_mean_squared_error')
return [best_k, final_error.mean()]
users = pd.read_csv('data/user_data_normalized_28-11-2016_01h32.csv',
delimiter=',')
movies = pd.read_csv('data/movie_data_normalized.csv', delimiter=',')
train = pd.read_csv('data/data_train.csv', delimiter=',')
output = pd.read_csv('data/output_train.csv', delimiter=',')['rating']
aggregated = pd.read_csv('data/agregated_data_28-11-2016_01h50.csv',
delimiter=',')
ms = ModelSelection(users.values, movies.values, aggregated.values, train.
values, output)
print(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))
<|reserved_special_token_1|>
import numpy as np
import pandas as pd
import datetime
import time
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import KNeighborsRegressor
from sklearn.model_selection import cross_val_score
from sklearn import preprocessing
from sklearn.model_selection import KFold
def make_submission(y_predict, user_id_test, movie_id_test, name=None, date=True):
    """Write a Kaggle-style submission CSV of predicted ratings.

    Parameters
    ----------
    y_predict : sequence of float
        Predicted rating for each (user, movie) pair.
    user_id_test, movie_id_test : sequences of int
        Identifiers of the pairs, aligned element-wise with ``y_predict``.
    name : str, optional
        Base name of the output file (default ``'submission'``).
    date : bool
        If True, append a ``dd-mm-YYYY_HHhMM`` timestamp to the file name.

    Raises
    ------
    ValueError
        If any prediction is NaN.
    """
    n_elements = len(y_predict)
    if name is None:
        name = 'submission'
    if date:
        name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))
    with open(name + ".csv", 'w') as f:
        f.write('"USER_ID_MOVIE_ID","PREDICTED_RATING"\n')
        for i in range(n_elements):
            if np.isnan(y_predict[i]):
                raise ValueError('NaN detected!')
            # BUG FIX: the header declares ONE combined "USER_ID_MOVIE_ID" key,
            # but the original wrote the user id and movie id as two separate
            # columns, so rows had three fields under a two-field header.
            # Join the ids with '_' so each row matches the declared format.
            line = '{:0.0f}_{:0.0f},{}\n'.format(user_id_test[i],
                                                 movie_id_test[i],
                                                 y_predict[i])
            f.write(line)
    print("Submission file successfully written!")
class ModelSelection:
    """Nested cross-validation helper that tunes one integer hyper-parameter
    of a few scikit-learn estimators on the aggregated (user, movie) features.

    Only ``aggregated`` and ``output_train`` are consumed by the optimisation
    routines; the other tables are kept for completeness.
    """

    def __init__(self, user_data, movie_data, aggregated_data, train_data,
                 output_train):
        self.train = train_data
        self.users = user_data
        self.aggregated = aggregated_data
        self.movies = movie_data
        self.output_train = output_train

    def _optimize(self, make_estimator, nb_fold, param_range):
        """Tune one hyper-parameter by nested cross-validation.

        Parameters
        ----------
        make_estimator : callable
            ``make_estimator(value)`` returns a fresh estimator configured
            with the candidate hyper-parameter ``value``.
        nb_fold : int
            Number of folds for both the outer and the inner CV loop.
        param_range : iterable of int
            Candidate hyper-parameter values.

        Returns
        -------
        list
            ``[best_param, mean_mse]`` where ``mean_mse`` is the CV mean
            squared error of the best parameter refitted on all the data.
        """
        kf = KFold(n_splits=nb_fold)
        params = list(param_range)
        # errors[i] accumulates the inner-CV MSE of params[i] over the outer folds.
        errors = np.zeros(len(params))
        for train_index, _test_index in kf.split(self.aggregated):
            for i, value in enumerate(params):
                estimator = make_estimator(value)
                scores = cross_val_score(
                    estimator, self.aggregated[train_index],
                    self.output_train[train_index], cv=nb_fold,
                    scoring='neg_mean_squared_error')
                errors[i] += -scores.mean()
        errors /= nb_fold
        # BUG FIX: the original initialised best_error = 5, so whenever every
        # candidate's MSE exceeded 5 (possible on a 1-5 rating scale, where the
        # squared error can reach 16) the "best" parameter silently stayed 0,
        # which is invalid for sklearn.  argmin has no such failure mode.
        # BUG FIX: the original read the winner back out of a float np.zeros
        # array, handing sklearn a float (e.g. max_depth=2.0); here the
        # original integer candidate is returned instead.
        best_param = params[int(np.argmin(errors))]
        # Re-estimate the error of the winning model on the whole data set.
        final_error = -cross_val_score(
            make_estimator(best_param), self.aggregated, self.output_train,
            cv=nb_fold, scoring='neg_mean_squared_error')
        return [best_param, final_error.mean()]

    def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range):
        """Tune ``max_depth`` of a DecisionTreeClassifier; see ``_optimize``."""
        return self._optimize(
            lambda depth: DecisionTreeClassifier(max_depth=depth),
            nb_fold, max_depth_range)

    def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):
        """Tune ``n_neighbors`` of a KNeighborsClassifier; see ``_optimize``."""
        return self._optimize(
            lambda k: KNeighborsClassifier(n_neighbors=k),
            nb_fold, k_range)

    def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):
        """Tune ``n_neighbors`` of a KNeighborsRegressor; see ``_optimize``."""
        return self._optimize(
            lambda k: KNeighborsRegressor(n_neighbors=k),
            nb_fold, k_range)
# Load the pre-processed feature tables.  Paths are relative to the project
# root; some file names carry the date of the normalisation run that wrote them.
users = pd.read_csv("data/user_data_normalized_28-11-2016_01h32.csv", delimiter=",")
movies = pd.read_csv("data/movie_data_normalized.csv", delimiter=",")
train = pd.read_csv("data/data_train.csv", delimiter=",")
# Target ratings, aligned row-for-row with the training pairs.
output = pd.read_csv("data/output_train.csv", delimiter=",")["rating"]
aggregated = pd.read_csv("data/agregated_data_28-11-2016_01h50.csv", delimiter=",")
# Raw numpy views of the tables are handed to the model-selection helper.
ms = ModelSelection(users.values, movies.values, aggregated.values, train.values, output)
# Grid-search one hyper-parameter with 5-fold nested cross-validation; the
# commented calls are alternative searches kept for reference.
#print(ms.optimizeParametersDecisionTreeClassifier(5, range(2,3,1)))
print(ms.optimizeParametersKNeighborsClassifier(5, range(1,5,1)))
#print(ms.optimizeParametersKNeighborsClassifier(5, range(5,10,1)))
|
flexible
|
{
"blob_id": "5172819da135600d0764033a85a4175098274806",
"index": 7388,
"step-1": "<mask token>\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < 
best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date\n =True):\n n_elements = len(y_predict)\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n with open(name + '.csv', 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],\n movie_id_test[i], y_predict[i])\n f.write(line)\n print('Submission file successfully written!')\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = 
KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\n<mask token>\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))\n",
"step-3": "<mask token>\n\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date\n =True):\n n_elements = len(y_predict)\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n with open(name + '.csv', 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],\n movie_id_test[i], y_predict[i])\n f.write(line)\n print('Submission file successfully written!')\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = 
KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\nusers = pd.read_csv('data/user_data_normalized_28-11-2016_01h32.csv',\n delimiter=',')\nmovies = pd.read_csv('data/movie_data_normalized.csv', delimiter=',')\ntrain = pd.read_csv('data/data_train.csv', delimiter=',')\noutput 
= pd.read_csv('data/output_train.csv', delimiter=',')['rating']\naggregated = pd.read_csv('data/agregated_data_28-11-2016_01h50.csv',\n delimiter=',')\nms = ModelSelection(users.values, movies.values, aggregated.values, train.\n values, output)\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport datetime\nimport time\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.neighbors import KNeighborsRegressor\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn import preprocessing\nfrom sklearn.model_selection import KFold\n\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date\n =True):\n n_elements = len(y_predict)\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n with open(name + '.csv', 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],\n movie_id_test[i], y_predict[i])\n f.write(line)\n print('Submission file successfully written!')\n\n\nclass ModelSelection:\n\n def __init__(self, user_data, movie_data, aggregated_data, train_data,\n output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range\n ):\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n best_depth = 0\n best_error = 5\n for depth, error in depth_n_errors:\n if error < best_error:\n 
best_error = error\n best_depth = depth\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_depth, final_error.mean()]\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n for train_index, test_index in kf.split(self.aggregated):\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index],\n self.output_train[train_index], cv=nb_fold, scoring=\n 'neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n best_k = 0\n best_error = 5\n for k, error in k_n_errors:\n if error < best_error:\n best_error = error\n best_k = k\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, 
self.\n output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n return [best_k, final_error.mean()]\n\n\nusers = pd.read_csv('data/user_data_normalized_28-11-2016_01h32.csv',\n delimiter=',')\nmovies = pd.read_csv('data/movie_data_normalized.csv', delimiter=',')\ntrain = pd.read_csv('data/data_train.csv', delimiter=',')\noutput = pd.read_csv('data/output_train.csv', delimiter=',')['rating']\naggregated = pd.read_csv('data/agregated_data_28-11-2016_01h50.csv',\n delimiter=',')\nms = ModelSelection(users.values, movies.values, aggregated.values, train.\n values, output)\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1, 5, 1)))\n",
"step-5": "import numpy as np\nimport pandas as pd\nimport datetime\nimport time\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.neighbors import KNeighborsRegressor\nfrom sklearn.model_selection import cross_val_score\nfrom sklearn import preprocessing\nfrom sklearn.model_selection import KFold\n\ndef make_submission(y_predict, user_id_test, movie_id_test, name=None, date=True):\n n_elements = len(y_predict)\n\n if name is None:\n name = 'submission'\n if date:\n name = name + '_{}'.format(time.strftime('%d-%m-%Y_%Hh%M'))\n\n with open(name + \".csv\", 'w') as f:\n f.write('\"USER_ID_MOVIE_ID\",\"PREDICTED_RATING\"\\n')\n for i in range(n_elements):\n if np.isnan(y_predict[i]):\n raise ValueError('NaN detected!')\n line = '{:0.0f},{:0.0f},{}\\n'.format(user_id_test[i],movie_id_test[i],y_predict[i])\n f.write(line)\n print(\"Submission file successfully written!\")\n\nclass ModelSelection:\n def __init__(self, user_data, movie_data, aggregated_data, train_data, output_train):\n self.train = train_data\n self.users = user_data\n self.aggregated = aggregated_data\n self.movies = movie_data\n self.output_train = output_train\n\n def optimizeParametersDecisionTreeClassifier(self, nb_fold, max_depth_range):\n\n kf = KFold(n_splits=nb_fold)\n depth_n_errors = np.zeros((max_depth_range.__len__(), 2))\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][0] = depth\n i += 1\n #First round of cv\n for train_index, test_index in kf.split(self.aggregated):\n #Second round of cv\n i = 0\n for depth in max_depth_range:\n dt = DecisionTreeClassifier(max_depth=depth)\n scores = cross_val_score(dt, self.aggregated[train_index], self.output_train[train_index], cv=nb_fold, scoring='neg_mean_squared_error')\n depth_n_errors[i][1] += -scores.mean()\n i += 1\n\n i = 0\n for depth in max_depth_range:\n depth_n_errors[i][1] /= nb_fold\n i += 1\n\n best_depth = 0\n best_error = 5\n #Take the best model and cross 
validate it on the whole data\n for depth, error in depth_n_errors:\n if(error < best_error):\n best_error = error\n best_depth = depth\n\n #Recompute the error for this model on the whole data set\n dt = DecisionTreeClassifier(max_depth=best_depth)\n final_error = -cross_val_score(dt, self.aggregated, self.output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n\n return[best_depth, final_error.mean()]\n\n\n def optimizeParametersKNeighborsClassifier(self, nb_fold, k_range):\n\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n #First round of cv\n for train_index, test_index in kf.split(self.aggregated):\n #Second round of cv\n i = 0\n for k in k_range:\n dt = KNeighborsClassifier(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index], self.output_train[train_index], cv=nb_fold, scoring='neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n\n best_k = 0\n best_error = 5\n #Take the best model and cross validate it on the whole data\n for k, error in k_n_errors:\n if(error < best_error):\n best_error = error\n best_k = k\n\n #Recompute the error for this model on the whole data set\n dt = KNeighborsClassifier(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n\n return[best_k, final_error.mean()]\n\n def optimizeParametersKNeighborsRegressor(self, nb_fold, k_range):\n\n kf = KFold(n_splits=nb_fold)\n k_n_errors = np.zeros((k_range.__len__(), 2))\n i = 0\n for k in k_range:\n k_n_errors[i][0] = k\n i += 1\n #First round of cv\n for train_index, test_index in kf.split(self.aggregated):\n #Second round of cv\n i = 0\n for k in k_range:\n dt = KNeighborsRegressor(n_neighbors=k)\n scores = cross_val_score(dt, self.aggregated[train_index], self.output_train[train_index], cv=nb_fold, 
scoring='neg_mean_squared_error')\n k_n_errors[i][1] += -scores.mean()\n i += 1\n\n for i in range(k_range.__len__()):\n k_n_errors[i][1] /= nb_fold\n\n best_k = 0\n best_error = 5\n #Take the best model and cross validate it on the whole data\n for k, error in k_n_errors:\n if(error < best_error):\n best_error = error\n best_k = k\n\n #Recompute the error for this model on the whole data set\n dt = KNeighborsRegressor(n_neighbors=best_k)\n final_error = -cross_val_score(dt, self.aggregated, self.output_train, cv=nb_fold, scoring='neg_mean_squared_error')\n\n return[best_k, final_error.mean()]\n\n\n\n\nusers = pd.read_csv(\"data/user_data_normalized_28-11-2016_01h32.csv\", delimiter=\",\")\nmovies = pd.read_csv(\"data/movie_data_normalized.csv\", delimiter=\",\")\ntrain = pd.read_csv(\"data/data_train.csv\", delimiter=\",\")\noutput = pd.read_csv(\"data/output_train.csv\", delimiter=\",\")[\"rating\"]\naggregated = pd.read_csv(\"data/agregated_data_28-11-2016_01h50.csv\", delimiter=\",\")\nms = ModelSelection(users.values, movies.values, aggregated.values, train.values, output)\n#print(ms.optimizeParametersDecisionTreeClassifier(5, range(2,3,1)))\nprint(ms.optimizeParametersKNeighborsClassifier(5, range(1,5,1)))\n#print(ms.optimizeParametersKNeighborsClassifier(5, range(5,10,1)))",
"step-ids": [
5,
7,
8,
9,
10
]
}
|
[
5,
7,
8,
9,
10
] |
import messages
import os
import requests
from bs4 import BeautifulSoup
# Base URL of the Mailman web interface the rosters are fetched from.
URL = "https://mailman.kcl.ac.uk/mailman/"
# Path segment of the per-list admin login page (password POST target).
ADMIN = "admin/"
# Path segment of the per-list member roster page.
ROSTER = "roster/"
# Directory where downloaded rosters are written, relative to the script.
OUTPUT_FOLDER = "../output/"
def makeoutput(path):
    """Ensure the output directory *path* exists, creating it if necessary."""
    # os.makedirs with exist_ok=True avoids the original check-then-create
    # race (if/else around os.mkdir) and also creates missing parents.
    os.makedirs(path, exist_ok=True)
def mailinglist_cookies(mailinglist, password):
    """Log in to the list's admin page and return the session cookies.

    Posts the admin password to the admin URL; a successful login yields a
    session cookie jar to pass to subsequent requests. Returns None (after
    printing the shared error message) when the login fails with a 4XX
    (wrong password) or the request itself errors.
    """
    try:
        cookie_request = requests.post(URL + ADMIN + mailinglist,
                                       data={'adminpw': password})
        cookie_request.raise_for_status()
        return cookie_request.cookies
    # Narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit; only network/HTTP failures (including
    # the HTTPError raised above for a bad password) are expected here.
    except requests.exceptions.RequestException:
        print(messages.error_message)
        return None
def make_roster(mailinglist, cookies):
    """Fetch the member roster for *mailinglist* using the admin *cookies*.

    :return: list of member e-mail addresses as strings.
    """
    roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)
    roster_soup = BeautifulSoup(roster_request.text, 'html.parser')
    # The last 4 anchors on the roster page are admin navigation links,
    # not members, so drop them.
    member_links = roster_soup.find_all('a')[:-4]
    # Mailman obfuscates addresses as "user at example.com"; restore the
    # conventional "user@example.com" form.
    return [link.text.replace(' at ', '@') for link in member_links]
def main():
    """Interactively download one mailing list roster and save it to disk.

    Prompts for a list name and admin password, retrying until a login
    succeeds, then writes one "address;" line per member to
    OUTPUT_FOLDER/<list>-mailinglist.txt.
    """
    makeoutput(OUTPUT_FOLDER)
    print(messages.welcome_message)

    while True:
        mailinglist = input("What's the name of the mailing list you want to download?> ")
        password = input("What is the list admin password?> ")
        filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'

        cookies = mailinglist_cookies(mailinglist, password)
        # `is not None` is the idiomatic identity test (was `!= None`).
        if cookies is not None:
            roster = make_roster(mailinglist, cookies)
            # Open the file once; the original re-opened it in append mode
            # for every single address.
            with open(filename, 'a') as output:
                for count, email in enumerate(roster, 1):
                    print(count, "/", len(roster))
                    output.write(email + ';\n')

            print("Saved", len(roster), "email addresses in", os.path.abspath(filename))
            input("press enter to close")
            break


if __name__ == '__main__':
    main()
|
normal
|
{
"blob_id": "0e337ce21450e0fdb7688183d0542ebf902a9614",
"index": 1293,
"step-1": "<mask token>\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nURL = 'https://mailman.kcl.ac.uk/mailman/'\nADMIN = 'admin/'\nROSTER = 'roster/'\nOUTPUT_FOLDER = '../output/'\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import messages\nimport os\nimport requests\nfrom bs4 import BeautifulSoup\nURL = 'https://mailman.kcl.ac.uk/mailman/'\nADMIN = 'admin/'\nROSTER = 'roster/'\nOUTPUT_FOLDER = '../output/'\n\n\ndef makeoutput(path):\n if os.path.exists(path):\n pass\n else:\n os.mkdir(path)\n\n\ndef mailinglist_cookies(mailinglist, password):\n try:\n cookie_request = requests.post(URL + ADMIN + mailinglist, data={\n 'adminpw': password})\n cookie_request.raise_for_status()\n return cookie_request.cookies\n except:\n print(messages.error_message)\n return None\n\n\ndef make_roster(mailinglist, cookies):\n roster_request = requests.get(URL + ROSTER + mailinglist, cookies=cookies)\n roster_soup = BeautifulSoup(roster_request.text, 'html.parser')\n roster_result_set = roster_soup.find_all('a')[:-4]\n roster = []\n for r in roster_result_set:\n roster.append(r.text.replace(' at ', '@'))\n return roster\n\n\ndef main():\n makeoutput(OUTPUT_FOLDER)\n print(messages.welcome_message)\n while True:\n mailinglist = input(\n \"What's the name of the mailing list you want to download?> \")\n password = input('What is the list admin password?> ')\n filename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n cookies = mailinglist_cookies(mailinglist, password)\n if cookies != None:\n roster = make_roster(mailinglist, cookies)\n for count, email in enumerate(roster, 1):\n print(count, '/', len(roster))\n with open(filename, 'a') as output:\n output.write(email + ';\\n')\n print('Saved', len(roster), 'email addresses in', os.path.\n abspath(filename))\n input('press enter to close')\n break\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\nimport messages\nimport os\nimport requests\nfrom bs4 import BeautifulSoup\n\nURL = \"https://mailman.kcl.ac.uk/mailman/\"\nADMIN = \"admin/\"\nROSTER = \"roster/\"\nOUTPUT_FOLDER = \"../output/\"\n\ndef makeoutput(path):\t\n\tif os.path.exists(path):\n\t\tpass\n\telse:\n\t\tos.mkdir(path)\n\ndef mailinglist_cookies(mailinglist, password): # this opens up the admin page, enters the password, and saves the returned cookie to be passed to the next request\n\ttry:\n\t\tcookie_request = requests.post(URL+ ADMIN + mailinglist, data = {'adminpw':password})\n\t\tcookie_request.raise_for_status()\n\t\treturn cookie_request.cookies \n\texcept: # raises exception if the password is incorrect (or any other 4XX error)\n\t\tprint(messages.error_message)\n\t\treturn None\n\ndef make_roster(mailinglist, cookies): # takes the cookie from the cookie request and requests the roster\n\troster_request = requests.get(URL+ ROSTER + mailinglist, cookies = cookies)\n\troster_soup = BeautifulSoup(roster_request.text,'html.parser')\n\troster_result_set = roster_soup.find_all('a')[:-4] # the last 4 links on the page are admin links\n\troster = []\n\tfor r in roster_result_set:\n\t\troster.append(r.text.replace(' at ','@')) #the mailman list inexplicably uses a stupid ' at ' display format\n\n\treturn roster\n\ndef main():\n\t\n\tmakeoutput(OUTPUT_FOLDER)\t\n\tprint(messages.welcome_message)\t\n\n\twhile True:\t\t\n\t\tmailinglist = input(\"What's the name of the mailing list you want to download?> \")\n\t\tpassword = input(\"What is the list admin password?> \")\n\t\tfilename = OUTPUT_FOLDER + mailinglist + '-mailinglist.txt'\n\n\t\tcookies = mailinglist_cookies(mailinglist, password)\n\t\tif cookies != None:\n\t\t\troster = make_roster(mailinglist, cookies)\t\t\n\t\t\tfor count, email in enumerate(roster,1):\n\t\t\t\t\n\t\t\t\tprint(count,\"/\",len(roster))\n\n\t\t\t\twith open(filename, 'a') as output:\n\t\t\t\t\toutput.write(email + ';\\n')\n\t\t\t\n\t\t\tprint(\"Saved\", 
len(roster), \"email addresses in\", os.path.abspath(filename))\n\t\t\tinput(\"press enter to close\")\n\t\t\tbreak\t\t\n\nif __name__ == '__main__':\n\tmain()",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
"""This module contains a class supporting composition of AugraphyPipelines"""
class ComposePipelines:
"""The composition of multiple AugraphyPipelines.
Define AugraphyPipelines elsewhere, then use this to compose them.
ComposePipelines objects are callable on images (as numpy.ndarrays).
:param pipelines: A list contains multiple augraphy.base.AugraphyPipeline.
:type pipelines: list or tuple
"""
def __init__(self, pipelines):
self.pipelines = pipelines
def __call__(self, image):
augmented_image = image.copy()
newpipeline = dict()
for i, pipeline in enumerate(self.pipelines):
data_output = pipeline.augment(augmented_image)
augmented_image = data_output["output"]
for key in data_output.keys():
newkey = "pipeline" + str(i) + "-" + key
newpipeline[newkey] = data_output[key]
return newpipeline
|
normal
|
{
"blob_id": "13c55c313c740edce48fc979e8956fdd018e8aab",
"index": 9716,
"step-1": "<mask token>\n\n\nclass ComposePipelines:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ComposePipelines:\n <mask token>\n <mask token>\n\n def __call__(self, image):\n augmented_image = image.copy()\n newpipeline = dict()\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output['output']\n for key in data_output.keys():\n newkey = 'pipeline' + str(i) + '-' + key\n newpipeline[newkey] = data_output[key]\n return newpipeline\n",
"step-3": "<mask token>\n\n\nclass ComposePipelines:\n <mask token>\n\n def __init__(self, pipelines):\n self.pipelines = pipelines\n\n def __call__(self, image):\n augmented_image = image.copy()\n newpipeline = dict()\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output['output']\n for key in data_output.keys():\n newkey = 'pipeline' + str(i) + '-' + key\n newpipeline[newkey] = data_output[key]\n return newpipeline\n",
"step-4": "<mask token>\n\n\nclass ComposePipelines:\n \"\"\"The composition of multiple AugraphyPipelines.\n Define AugraphyPipelines elsewhere, then use this to compose them.\n ComposePipelines objects are callable on images (as numpy.ndarrays).\n\n :param pipelines: A list contains multiple augraphy.base.AugraphyPipeline.\n :type pipelines: list or tuple\n \"\"\"\n\n def __init__(self, pipelines):\n self.pipelines = pipelines\n\n def __call__(self, image):\n augmented_image = image.copy()\n newpipeline = dict()\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output['output']\n for key in data_output.keys():\n newkey = 'pipeline' + str(i) + '-' + key\n newpipeline[newkey] = data_output[key]\n return newpipeline\n",
"step-5": "\"\"\"This module contains a class supporting composition of AugraphyPipelines\"\"\"\n\n\nclass ComposePipelines:\n \"\"\"The composition of multiple AugraphyPipelines.\n Define AugraphyPipelines elsewhere, then use this to compose them.\n ComposePipelines objects are callable on images (as numpy.ndarrays).\n\n :param pipelines: A list contains multiple augraphy.base.AugraphyPipeline.\n :type pipelines: list or tuple\n \"\"\"\n\n def __init__(self, pipelines):\n self.pipelines = pipelines\n\n def __call__(self, image):\n\n augmented_image = image.copy()\n newpipeline = dict()\n\n for i, pipeline in enumerate(self.pipelines):\n data_output = pipeline.augment(augmented_image)\n augmented_image = data_output[\"output\"]\n\n for key in data_output.keys():\n newkey = \"pipeline\" + str(i) + \"-\" + key\n newpipeline[newkey] = data_output[key]\n\n return newpipeline\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
@synchronized
def start():
startService(command='/opt/icecream/sbin/iceccd', args=
'-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)
<|reserved_special_token_0|>
def status():
return isServiceRunning('/var/run/iceccd.pid')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@synchronized
def start():
startService(command='/opt/icecream/sbin/iceccd', args=
'-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)
@synchronized
def stop():
stopService(pidfile='/var/run/iceccd.pid', donotify=True)
def status():
return isServiceRunning('/var/run/iceccd.pid')
<|reserved_special_token_1|>
serviceType = 'server'
serviceDesc = _({'en': 'Icecream Daemon', 'tr': 'Icecream Servisi'})
<|reserved_special_token_0|>
@synchronized
def start():
startService(command='/opt/icecream/sbin/iceccd', args=
'-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)
@synchronized
def stop():
stopService(pidfile='/var/run/iceccd.pid', donotify=True)
def status():
return isServiceRunning('/var/run/iceccd.pid')
<|reserved_special_token_1|>
serviceType = 'server'
serviceDesc = _({'en': 'Icecream Daemon', 'tr': 'Icecream Servisi'})
from comar.service import *
@synchronized
def start():
startService(command='/opt/icecream/sbin/iceccd', args=
'-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)
@synchronized
def stop():
stopService(pidfile='/var/run/iceccd.pid', donotify=True)
def status():
return isServiceRunning('/var/run/iceccd.pid')
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
serviceType = "server"
serviceDesc = _({"en": "Icecream Daemon",
"tr": "Icecream Servisi"})
from comar.service import *
@synchronized
def start():
startService(command="/opt/icecream/sbin/iceccd",
args="-d -m 5 > /dev/null",
pidfile="/var/run/iceccd.pid",
donotify=True)
@synchronized
def stop():
stopService(pidfile="/var/run/iceccd.pid",
donotify=True)
def status():
return isServiceRunning("/var/run/iceccd.pid")
|
flexible
|
{
"blob_id": "e3603d90bd5aa5de40baa27b62acf6f71eff9f6c",
"index": 6827,
"step-1": "<mask token>\n\n\n@synchronized\ndef start():\n startService(command='/opt/icecream/sbin/iceccd', args=\n '-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)\n\n\n<mask token>\n\n\ndef status():\n return isServiceRunning('/var/run/iceccd.pid')\n",
"step-2": "<mask token>\n\n\n@synchronized\ndef start():\n startService(command='/opt/icecream/sbin/iceccd', args=\n '-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)\n\n\n@synchronized\ndef stop():\n stopService(pidfile='/var/run/iceccd.pid', donotify=True)\n\n\ndef status():\n return isServiceRunning('/var/run/iceccd.pid')\n",
"step-3": "serviceType = 'server'\nserviceDesc = _({'en': 'Icecream Daemon', 'tr': 'Icecream Servisi'})\n<mask token>\n\n\n@synchronized\ndef start():\n startService(command='/opt/icecream/sbin/iceccd', args=\n '-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)\n\n\n@synchronized\ndef stop():\n stopService(pidfile='/var/run/iceccd.pid', donotify=True)\n\n\ndef status():\n return isServiceRunning('/var/run/iceccd.pid')\n",
"step-4": "serviceType = 'server'\nserviceDesc = _({'en': 'Icecream Daemon', 'tr': 'Icecream Servisi'})\nfrom comar.service import *\n\n\n@synchronized\ndef start():\n startService(command='/opt/icecream/sbin/iceccd', args=\n '-d -m 5 > /dev/null', pidfile='/var/run/iceccd.pid', donotify=True)\n\n\n@synchronized\ndef stop():\n stopService(pidfile='/var/run/iceccd.pid', donotify=True)\n\n\ndef status():\n return isServiceRunning('/var/run/iceccd.pid')\n",
"step-5": "# -*- coding: utf-8 -*-\nserviceType = \"server\"\nserviceDesc = _({\"en\": \"Icecream Daemon\",\n \"tr\": \"Icecream Servisi\"})\n\nfrom comar.service import *\n\n@synchronized\ndef start():\n startService(command=\"/opt/icecream/sbin/iceccd\",\n args=\"-d -m 5 > /dev/null\",\n pidfile=\"/var/run/iceccd.pid\",\n donotify=True)\n\n@synchronized\ndef stop():\n stopService(pidfile=\"/var/run/iceccd.pid\",\n donotify=True)\n\ndef status():\n return isServiceRunning(\"/var/run/iceccd.pid\")\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import numpy as np
import pandas as pd
from unrar import rarfile
import numpy as np
import pandas as pd
import tushare as ts
import os
year_month='201911'
contract_kind='NI'
rar_data_file_path='C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/'
main_code_path='C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/main/main_/'
clean_data_path='D:/1_min补充统一/'
end_date='20200107'
time_range_path='D:/统一所有品种时间范围.csv'
# save_month_fill_data_path='D:/1_min补充统一/'+contract_kind+'主力连续'+'_'+month+'.csv'
def renew_commodity_future(year_month:str,contract_kind:str,main_code_path:str,rar_data_file_path:str,clean_data_path:str,time_range_path:str,end_date:str,commodity_bool=True):
'''
用于更新月度的商品期货数据
year_month:'201911'字符串年份和月份,对应的是FutAC_Min1_Std_后面的数字,如FutAC_Min1_Std_201911
contract_kind:放对应品种的list 类似['A','B']
main_code_path:对应存放主力合约的地方
rar_data_file_path: 对应的是存放rar数据如FutAC_Min1_Std_201911.rar的位置,不包括对应的文件名
clean_data_path:对应存放分钟数据的位置,处理好的新数据会追加到对应位置下的对应品种处
time_range_path:放置交易时间文件的路径,包括文件名 如 D:/统一所有品种时间范围.csv
end_date :'20200103' 今日的日期,用来请求tushare中的交易日历,数据的读取合并都是以交易日历的时间驱动
commodity_bool:商品期货对应True,金融期货False,默认商品期货
'''
month=year_month
if commodity_bool:
file_name=rar_data_file_path+'FutAC_Min1_Std_'+month+'.rar'
else:
file_name=rar_data_file_path+'FutSF_Min1_Std_'+month+'.rar'
orignial_path=main_code_path
specifi_path=orignial_path+contract_kind+'_1day_main.npy'
rar = rarfile.RarFile(file_name,pwd='www.jinshuyuan.net')
# 原始的处理好的数据
orignal_clean_csv_path=clean_data_path
pwd='www.jinshuyuan.net'
data=np.load(specifi_path)
time_0931_15=pd.read_csv(time_range_path)['date'].values.tolist()
rar.extractall(path=file_name.split('.')[0])
# 首先需要输入end_date 确保截取的时间长度和main主力合约的时间对齐
# 按照月份确定位置
pro = ts.pro_api('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')
ts.set_token('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')
date_df=pro.trade_cal(exchange='DCE', start_date='20100101', end_date=end_date)
date_df=date_df.loc[date_df['is_open']==1]
date_list=date_df['cal_date'].tolist()
# ==========================================================================
# 针对的是201911月数据,对应的合约index 放在 target_date_index中
date_df=pd.DataFrame({'date':date_list})
date_df['month']=date_df['date'].str[:6]
target_date=date_df.loc[date_df['month']==month]
target_date_index=target_date.index.values
target_date=target_date['date'].values
# 获取对应目标
data=data.reshape(-1)
contract_main_pool=data[target_date_index]
# 去掉交易所的代码编号
contract_main_pool=(pd.Series(contract_main_pool).str.split('.').str[0]+'.csv').values
file_pools=os.listdir(file_name.split('.')[0])
# 郑州期货交易所是大写,其它都是小写,这里需要逻辑判断
if contract_main_pool[0] not in file_pools:
contract_main_pool=[contract_file.lower() for contract_file in contract_main_pool]
if contract_main_pool[0] not in file_pools:
print(f'找不到{contract_main_pool[0]}')
# 读取好所有的路径
contract_main_pool=(file_name.split('.')[0]+'/'+pd.Series(contract_main_pool)).values
# (len(target_date),contract_main_pool.shape[0])
row_1=['市场代码','合约代码', '时间', '开','高', '低', '收', '成交量', '成交额', '持仓量']
orignal_data=[]
orignal_data.append(row_1)
for index in range(len(target_date)):
date=target_date[index]
one_file_path=contract_main_pool[index]
df=pd.read_csv(one_file_path,encoding='gbk')
df['date']=df['时间'].str[:10]
df['date2']=df['date'].str.replace('-','')
result=df.loc[df['date2']==date]
if result.shape[0]>0:
for row_index in range(len(result)):
target_row=result.iloc[row_index].tolist()
clean_row=target_row[:-2]
orignal_data.append(clean_row)
print(f'{contract_kind} {date} finished!')
else:
print(f'没找到合约品种{contract_kind}在{date}')
print(f'{contract_kind}在{month}月的主力合约数据读取完成')
final_df=pd.DataFrame(orignal_data[1:],columns=orignal_data[0])
final_df['date']=final_df['时间'].str[:10]
final_df_date=final_df['date'].unique()
final_df['date']=final_df['时间'].str[:10]
final_df['time']=final_df['时间'].str[10:].str.strip()
final_df['时间']=final_df['date']+' '+final_df['time']
final_df=final_df.sort_values('时间')
final_df['合约代码']=final_df['合约代码'].str.upper()
final_df=final_df.sort_values('时间')
# ===============================增加了从constant_time进行截取================================
final_df['transf_date']=pd.to_datetime(final_df['date'])
final_df.set_index('transf_date',inplace=True)
combine_all_df=pd.DataFrame()
final_df['date2']=final_df['date'].str.replace('-','')
# 按月进行填充
# 设置了存放按月填充的路径
for date_index in range(len(target_date)):
#按日期进行分割
target_df=final_df.loc[final_df['date2']==target_date[date_index]]
#分割到的长度放入容器中
target_num=len(target_df)
#理论长度
theory_num=len(time_0931_15)
#实际上两种情况:1.是交易日但完全没有数据2.是交易日,只有部分数据 3.是交易日,数据也是完整的
if target_num>0:
#开始区分2,3情况
have_time=target_df['time'].values.tolist()
lack_time=[x for x in time_0931_15 if x not in have_time]
#检查是不是情况2
if lack_time:
print(f'{target_date[date_index]} 不连续')
#一共12列,先全部填充nan的时候,最后再把已知填入
insert_array=np.empty(shape=(len(lack_time),12))
insert_array.fill(np.nan)
insert_df=pd.DataFrame(insert_array,columns=['市场代码','合约代码','时间','开','高','低','收','成交量','成交额','持仓量','date','time'])
insert_df['date']=target_date[date_index]
insert_df['time']=lack_time
#缺少时间的个数小于time_0931_15则说明,当天并不是完全没数据,只是部分数据缺失,因此要对合约代码进行填充
if len(lack_time)<len(time_0931_15):
insert_df['合约代码']=target_df['合约代码'].unique()[-1]
#生成一天完整的数据
combine_insert_df=pd.concat([target_df,insert_df])
#将数据添加到容器中
combine_all_df=pd.concat([combine_all_df,combine_insert_df])
#完全没有数据,直接填充
else:
print(f'{target_date[date_index]}empty ')
lack_time=[x for x in time_0931_15]
#一共12列,先全部填充nan的时候,最后再把已知填入
insert_array=np.empty(shape=(len(lack_time),12))
insert_array.fill(np.nan)
insert_df=pd.DataFrame(insert_array,columns=['市场代码','合约代码','时间','开','高','低','收','成交量','成交额','持仓量','date','time'])
insert_df['date']=target_date[date_index]
insert_df['time']=lack_time
#将数据添加到容器
combine_all_df=pd.concat([combine_all_df,insert_df])
combine_all_df['时间']=combine_all_df['date']+' '+combine_all_df['time']
#调整时间
combine_all_df=combine_all_df.sort_values('时间')
combine_all_df.reset_index(inplace=True)
#数据输出,按设定的顺序
combine_all_df=combine_all_df[['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量','date','time']]
combine_all_df['时间']=combine_all_df['时间'].str.replace('-','')
combine_all_df['date']=combine_all_df['date'].str.replace('-','')
# combine_all_df.to_csv(save_month_fill_data_path,index=False,encoding='utf-8-sig')
# ==========================储存数据=================================================
combine_df=combine_all_df.copy()
contract_type=contract_kind
combine_df=combine_df.sort_values('时间')
# ====================================================================开始截取============================================================
# end_time+1其实是可以作为每次截取的起点,终点下一个就是起点,不过要加上0,而终点的位置也可以是end_time+1,因为end_time+1只能取end_time
# 按照下午15:15统一截取
end_time='15:15:00'
end_index=np.where(combine_df['time']==end_time)[0]+1
end_index=np.hstack(([0],end_index))
start=end_index[:-1]
end=end_index[1:]
# ================================================================缺失第一个交易日前一天的夜盘数据==========================================
# 这里的选择构造一个虚拟的时间戳,来满足缺失的夜盘数据
# 按照上一步的截取方法,第一个交易日缺少前一天的夜盘数据
last_day=date_df['date'].iloc[target_date_index[0]-1]
last_day=last_day[:4]+'-'+last_day[4:6]+'-'+last_day[6:]
first_day_have=combine_df[start[0]:end[0]]['time'].values
full_time=combine_df['time'].unique()
full_time.sort()
first_day_lack=[x for x in full_time[-179:]]
first_day_lack.sort()
lack_array=np.empty(shape=(len(first_day_lack),12))
lack_array.fill(np.nan)
# ===============================准备缺失部分df==========================================================================================
first_day_lack_df=pd.DataFrame(lack_array,columns=combine_df.columns)
first_day_lack_df['time']=first_day_lack
first_day_lack_df['date']=last_day
first_day_lack_df['时间']=first_day_lack_df['date']+' '+first_day_lack_df['time']
last_df=pd.read_csv(contract_main_pool[0],encoding='gbk')
# 确定之前的有没有夜盘
last_df['date']=last_df['时间'].str[:10]
last_df['time']=last_df['时间'].str[11:]
# 补夜盘数据
last_time_pool=last_df.loc[last_df['date']==last_day]['time'].values
last_day_have_date=[]
# 说明在上个交易日有数据
if last_time_pool.shape[0]>0:
print(f'期货品种{contract_kind}在前一个交易日{last_day}有夜盘数据,需要读取覆盖')
last_day_have_date=[x for x in last_time_pool]
if last_day_have_date:
for index in range(len(last_day_have_date)):
origanl_index=last_df.loc[(last_df['date']==last_day)&(last_df['time']==last_day_have_date[index])].index[0]
target_index=first_day_lack_df.loc[first_day_lack_df['time']==last_day_have_date[index]].index[0]
first_day_lack_df.iloc[target_index]=last_df.iloc[origanl_index]
else:
print(f'期货品种{contract_kind}在前一个交易日{last_day}没有夜盘数据,不需要读取覆盖')
print('直接使用np.nan填充上一个交易日的夜盘数据')
for index in range(first_day_lack_df.shape[0]):
combine_df=combine_df.append(first_day_lack_df.iloc[index])
combine_df['时间']=combine_df['时间'].str.replace('-','')
combine_df['date']=combine_df['date'].str.replace('-','')
combine_df.sort_values('时间',inplace=True)
# =================================缺失部分填充=========================================================================================
# combine_df=pd.concat([first_day_lack_df,combine_df])
# # ================================重新按时间排序========================================================================================
# combine_df=combine_df.sort_values('时间')
# ============================重新进行切割===============================================================================================
end_index=np.where(combine_df['time']==end_time)[0]+1
end_index=np.hstack(([0],end_index))
start=end_index[:-1]
end=end_index[1:]
# ==============================进行分割按照特定时间,明确col===============================================================================
col_type_list=['开','高','低','收','成交量','成交额','持仓量']
dir_name_list=['open','high','low','close','volume','amount','position']
#这个变量现在没有用
#交易到凌晨01
#merge_df=pd.DataFrame({'time':with_night_01})
#交易到凌晨0230,version中没有集合竞价时间,time_0931_15去掉9:00,21:00
merge_df=pd.DataFrame({'time':time_0931_15})
combine_df['date']=combine_df['时间'].str[:8]
for index in range(len(col_type_list)):
col_type=col_type_list[index]
# 用来接收分col数据的容器
csv_df=pd.DataFrame()
for s_index,e_index in zip(start,end):
# =========================================截取每个交易日数据==============================================================================
res=combine_df.iloc[s_index:e_index,:]
one_date_df=pd.DataFrame(res[col_type].values.reshape(1,-1),columns=res['time'].values.tolist())
one_date_df['main_contract_code']=res.iloc[-1]['合约代码']
one_date_df['date']=res.iloc[-1]['date']
# =======================================设置输出格式====================================================================================
col_layout=['date']
col_layout=np.hstack((col_layout,res['time'].values.tolist()))
col_layout=np.hstack((col_layout,['main_contract_code']))
one_date_df=one_date_df[col_layout]
# =======================================合并数据========================================================================================
csv_df=pd.concat([csv_df,one_date_df])
# ========================追加原始数据=======================================
# 时间问题需要处理,不然对不齐
# 在测试文件中测试,所以修改了路径
orignal_csv_df=pd.read_csv(orignal_clean_csv_path+contract_kind+'_1min_'+dir_name_list[index]+'.csv')
column_ouput_form=orignal_csv_df.columns.values
orignal_date_pool=pd.to_datetime(orignal_csv_df['date'],format='%Y-%m-%d').values
current_date_pool=pd.to_datetime(csv_df['date'],format='%Y-%m-%d').values
orignal_csv_df['date']=pd.to_datetime(orignal_csv_df['date'],format='%Y-%m-%d').dt.strftime('%Y-%m-%d')
csv_df['date']=pd.to_datetime(csv_df['date'],format='%Y%m%d').dt.strftime('%Y-%m-%d')
# check代码中的数字个数等于四个
main_code=csv_df['main_contract_code'].iloc[0]
main_code_num=csv_df['main_contract_code'].str.findall(r'[0-9]+').iloc[0][0]
if len(main_code_num)==3:
print(f'合约代码{main_code}缺少一位数字,将被替换')
csv_df['main_contract_code']=csv_df['main_contract_code'].str[:2]+month[0]+csv_df['main_contract_code'].str[2:]
main_code=csv_df['main_contract_code'].iloc[0]
print(f'合约代码{main_code}')
# 查看有没有交集,如果有交集会停止,说明进行了重复操作
intersection_pool=[date for date in orignal_date_pool if date in current_date_pool]
if not intersection_pool:
print(f'新旧数据没有时间交集,{contract_kind} {dir_name_list[index]} 将被添加到先前数据中')
orignal_csv_df=pd.concat([orignal_csv_df,csv_df])
orignal_csv_df.sort_values('date',inplace=True)
orignal_csv_df=orignal_csv_df[column_ouput_form]
orignal_csv_df.to_csv(orignal_clean_csv_path+contract_kind+'_1min_'+dir_name_list[index]+'.csv',index=False)
print(f'期货品种{contract_kind} {dir_name_list[index]} 完成')
else:
print(f'新旧数据的时间出现交集!!{contract_kind} {dir_name_list[index]} 将不会被添加到先前数据中')
|
normal
|
{
"blob_id": "1c2967c26c845281ceb46cc1d8c06768298ef6b6",
"index": 9407,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef renew_commodity_future(year_month: str, contract_kind: str,\n main_code_path: str, rar_data_file_path: str, clean_data_path: str,\n time_range_path: str, end_date: str, commodity_bool=True):\n \"\"\"\n 用于更新月度的商品期货数据\n year_month:'201911'字符串年份和月份,对应的是FutAC_Min1_Std_后面的数字,如FutAC_Min1_Std_201911\n contract_kind:放对应品种的list 类似['A','B']\n main_code_path:对应存放主力合约的地方\n rar_data_file_path: 对应的是存放rar数据如FutAC_Min1_Std_201911.rar的位置,不包括对应的文件名\n clean_data_path:对应存放分钟数据的位置,处理好的新数据会追加到对应位置下的对应品种处\n time_range_path:放置交易时间文件的路径,包括文件名 如 D:/统一所有品种时间范围.csv\n end_date :'20200103' 今日的日期,用来请求tushare中的交易日历,数据的读取合并都是以交易日历的时间驱动\n commodity_bool:商品期货对应True,金融期货False,默认商品期货\n \"\"\"\n month = year_month\n if commodity_bool:\n file_name = rar_data_file_path + 'FutAC_Min1_Std_' + month + '.rar'\n else:\n file_name = rar_data_file_path + 'FutSF_Min1_Std_' + month + '.rar'\n orignial_path = main_code_path\n specifi_path = orignial_path + contract_kind + '_1day_main.npy'\n rar = rarfile.RarFile(file_name, pwd='www.jinshuyuan.net')\n orignal_clean_csv_path = clean_data_path\n pwd = 'www.jinshuyuan.net'\n data = np.load(specifi_path)\n time_0931_15 = pd.read_csv(time_range_path)['date'].values.tolist()\n rar.extractall(path=file_name.split('.')[0])\n pro = ts.pro_api('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6'\n )\n ts.set_token('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')\n date_df = pro.trade_cal(exchange='DCE', start_date='20100101', end_date\n =end_date)\n date_df = date_df.loc[date_df['is_open'] == 1]\n date_list = date_df['cal_date'].tolist()\n date_df = pd.DataFrame({'date': date_list})\n date_df['month'] = date_df['date'].str[:6]\n target_date = date_df.loc[date_df['month'] == month]\n target_date_index = target_date.index.values\n target_date = target_date['date'].values\n data = data.reshape(-1)\n contract_main_pool = data[target_date_index]\n contract_main_pool = (pd.Series(contract_main_pool).str.split('.').str[\n 0] + 
'.csv').values\n file_pools = os.listdir(file_name.split('.')[0])\n if contract_main_pool[0] not in file_pools:\n contract_main_pool = [contract_file.lower() for contract_file in\n contract_main_pool]\n if contract_main_pool[0] not in file_pools:\n print(f'找不到{contract_main_pool[0]}')\n contract_main_pool = (file_name.split('.')[0] + '/' + pd.Series(\n contract_main_pool)).values\n row_1 = ['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量']\n orignal_data = []\n orignal_data.append(row_1)\n for index in range(len(target_date)):\n date = target_date[index]\n one_file_path = contract_main_pool[index]\n df = pd.read_csv(one_file_path, encoding='gbk')\n df['date'] = df['时间'].str[:10]\n df['date2'] = df['date'].str.replace('-', '')\n result = df.loc[df['date2'] == date]\n if result.shape[0] > 0:\n for row_index in range(len(result)):\n target_row = result.iloc[row_index].tolist()\n clean_row = target_row[:-2]\n orignal_data.append(clean_row)\n print(f'{contract_kind} {date} finished!')\n else:\n print(f'没找到合约品种{contract_kind}在{date}')\n print(f'{contract_kind}在{month}月的主力合约数据读取完成')\n final_df = pd.DataFrame(orignal_data[1:], columns=orignal_data[0])\n final_df['date'] = final_df['时间'].str[:10]\n final_df_date = final_df['date'].unique()\n final_df['date'] = final_df['时间'].str[:10]\n final_df['time'] = final_df['时间'].str[10:].str.strip()\n final_df['时间'] = final_df['date'] + ' ' + final_df['time']\n final_df = final_df.sort_values('时间')\n final_df['合约代码'] = final_df['合约代码'].str.upper()\n final_df = final_df.sort_values('时间')\n final_df['transf_date'] = pd.to_datetime(final_df['date'])\n final_df.set_index('transf_date', inplace=True)\n combine_all_df = pd.DataFrame()\n final_df['date2'] = final_df['date'].str.replace('-', '')\n for date_index in range(len(target_date)):\n target_df = final_df.loc[final_df['date2'] == target_date[date_index]]\n target_num = len(target_df)\n theory_num = len(time_0931_15)\n if target_num > 0:\n have_time = 
target_df['time'].values.tolist()\n lack_time = [x for x in time_0931_15 if x not in have_time]\n if lack_time:\n print(f'{target_date[date_index]} 不连续')\n insert_array = np.empty(shape=(len(lack_time), 12))\n insert_array.fill(np.nan)\n insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码',\n '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])\n insert_df['date'] = target_date[date_index]\n insert_df['time'] = lack_time\n if len(lack_time) < len(time_0931_15):\n insert_df['合约代码'] = target_df['合约代码'].unique()[-1]\n combine_insert_df = pd.concat([target_df, insert_df])\n combine_all_df = pd.concat([combine_all_df, combine_insert_df])\n else:\n print(f'{target_date[date_index]}empty ')\n lack_time = [x for x in time_0931_15]\n insert_array = np.empty(shape=(len(lack_time), 12))\n insert_array.fill(np.nan)\n insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码',\n '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])\n insert_df['date'] = target_date[date_index]\n insert_df['time'] = lack_time\n combine_all_df = pd.concat([combine_all_df, insert_df])\n combine_all_df['时间'] = combine_all_df['date'] + ' ' + combine_all_df['time'\n ]\n combine_all_df = combine_all_df.sort_values('时间')\n combine_all_df.reset_index(inplace=True)\n combine_all_df = combine_all_df[['市场代码', '合约代码', '时间', '开', '高', '低',\n '收', '成交量', '成交额', '持仓量', 'date', 'time']]\n combine_all_df['时间'] = combine_all_df['时间'].str.replace('-', '')\n combine_all_df['date'] = combine_all_df['date'].str.replace('-', '')\n combine_df = combine_all_df.copy()\n contract_type = contract_kind\n combine_df = combine_df.sort_values('时间')\n end_time = '15:15:00'\n end_index = np.where(combine_df['time'] == end_time)[0] + 1\n end_index = np.hstack(([0], end_index))\n start = end_index[:-1]\n end = end_index[1:]\n last_day = date_df['date'].iloc[target_date_index[0] - 1]\n last_day = last_day[:4] + '-' + last_day[4:6] + '-' + last_day[6:]\n first_day_have = 
combine_df[start[0]:end[0]]['time'].values\n full_time = combine_df['time'].unique()\n full_time.sort()\n first_day_lack = [x for x in full_time[-179:]]\n first_day_lack.sort()\n lack_array = np.empty(shape=(len(first_day_lack), 12))\n lack_array.fill(np.nan)\n first_day_lack_df = pd.DataFrame(lack_array, columns=combine_df.columns)\n first_day_lack_df['time'] = first_day_lack\n first_day_lack_df['date'] = last_day\n first_day_lack_df['时间'] = first_day_lack_df['date'\n ] + ' ' + first_day_lack_df['time']\n last_df = pd.read_csv(contract_main_pool[0], encoding='gbk')\n last_df['date'] = last_df['时间'].str[:10]\n last_df['time'] = last_df['时间'].str[11:]\n last_time_pool = last_df.loc[last_df['date'] == last_day]['time'].values\n last_day_have_date = []\n if last_time_pool.shape[0] > 0:\n print(f'期货品种{contract_kind}在前一个交易日{last_day}有夜盘数据,需要读取覆盖')\n last_day_have_date = [x for x in last_time_pool]\n if last_day_have_date:\n for index in range(len(last_day_have_date)):\n origanl_index = last_df.loc[(last_df['date'] == last_day) & (\n last_df['time'] == last_day_have_date[index])].index[0]\n target_index = first_day_lack_df.loc[first_day_lack_df['time'] ==\n last_day_have_date[index]].index[0]\n first_day_lack_df.iloc[target_index] = last_df.iloc[origanl_index]\n else:\n print(f'期货品种{contract_kind}在前一个交易日{last_day}没有夜盘数据,不需要读取覆盖')\n print('直接使用np.nan填充上一个交易日的夜盘数据')\n for index in range(first_day_lack_df.shape[0]):\n combine_df = combine_df.append(first_day_lack_df.iloc[index])\n combine_df['时间'] = combine_df['时间'].str.replace('-', '')\n combine_df['date'] = combine_df['date'].str.replace('-', '')\n combine_df.sort_values('时间', inplace=True)\n end_index = np.where(combine_df['time'] == end_time)[0] + 1\n end_index = np.hstack(([0], end_index))\n start = end_index[:-1]\n end = end_index[1:]\n col_type_list = ['开', '高', '低', '收', '成交量', '成交额', '持仓量']\n dir_name_list = ['open', 'high', 'low', 'close', 'volume', 'amount',\n 'position']\n merge_df = pd.DataFrame({'time': 
time_0931_15})\n combine_df['date'] = combine_df['时间'].str[:8]\n for index in range(len(col_type_list)):\n col_type = col_type_list[index]\n csv_df = pd.DataFrame()\n for s_index, e_index in zip(start, end):\n res = combine_df.iloc[s_index:e_index, :]\n one_date_df = pd.DataFrame(res[col_type].values.reshape(1, -1),\n columns=res['time'].values.tolist())\n one_date_df['main_contract_code'] = res.iloc[-1]['合约代码']\n one_date_df['date'] = res.iloc[-1]['date']\n col_layout = ['date']\n col_layout = np.hstack((col_layout, res['time'].values.tolist()))\n col_layout = np.hstack((col_layout, ['main_contract_code']))\n one_date_df = one_date_df[col_layout]\n csv_df = pd.concat([csv_df, one_date_df])\n orignal_csv_df = pd.read_csv(orignal_clean_csv_path + contract_kind +\n '_1min_' + dir_name_list[index] + '.csv')\n column_ouput_form = orignal_csv_df.columns.values\n orignal_date_pool = pd.to_datetime(orignal_csv_df['date'], format=\n '%Y-%m-%d').values\n current_date_pool = pd.to_datetime(csv_df['date'], format='%Y-%m-%d'\n ).values\n orignal_csv_df['date'] = pd.to_datetime(orignal_csv_df['date'],\n format='%Y-%m-%d').dt.strftime('%Y-%m-%d')\n csv_df['date'] = pd.to_datetime(csv_df['date'], format='%Y%m%d'\n ).dt.strftime('%Y-%m-%d')\n main_code = csv_df['main_contract_code'].iloc[0]\n main_code_num = csv_df['main_contract_code'].str.findall('[0-9]+'\n ).iloc[0][0]\n if len(main_code_num) == 3:\n print(f'合约代码{main_code}缺少一位数字,将被替换')\n csv_df['main_contract_code'] = csv_df['main_contract_code'].str[:2\n ] + month[0] + csv_df['main_contract_code'].str[2:]\n main_code = csv_df['main_contract_code'].iloc[0]\n print(f'合约代码{main_code}')\n intersection_pool = [date for date in orignal_date_pool if date in\n current_date_pool]\n if not intersection_pool:\n print(\n f'新旧数据没有时间交集,{contract_kind} {dir_name_list[index]} 将被添加到先前数据中'\n )\n orignal_csv_df = pd.concat([orignal_csv_df, csv_df])\n orignal_csv_df.sort_values('date', inplace=True)\n orignal_csv_df = 
orignal_csv_df[column_ouput_form]\n orignal_csv_df.to_csv(orignal_clean_csv_path + contract_kind +\n '_1min_' + dir_name_list[index] + '.csv', index=False)\n print(f'期货品种{contract_kind} {dir_name_list[index]} 完成')\n else:\n print(\n f'新旧数据的时间出现交集!!{contract_kind} {dir_name_list[index]} 将不会被添加到先前数据中'\n )\n",
"step-3": "<mask token>\nyear_month = '201911'\ncontract_kind = 'NI'\nrar_data_file_path = (\n 'C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/'\n )\nmain_code_path = (\n 'C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/main/main_/'\n )\nclean_data_path = 'D:/1_min补充统一/'\nend_date = '20200107'\ntime_range_path = 'D:/统一所有品种时间范围.csv'\n\n\ndef renew_commodity_future(year_month: str, contract_kind: str,\n main_code_path: str, rar_data_file_path: str, clean_data_path: str,\n time_range_path: str, end_date: str, commodity_bool=True):\n \"\"\"\n 用于更新月度的商品期货数据\n year_month:'201911'字符串年份和月份,对应的是FutAC_Min1_Std_后面的数字,如FutAC_Min1_Std_201911\n contract_kind:放对应品种的list 类似['A','B']\n main_code_path:对应存放主力合约的地方\n rar_data_file_path: 对应的是存放rar数据如FutAC_Min1_Std_201911.rar的位置,不包括对应的文件名\n clean_data_path:对应存放分钟数据的位置,处理好的新数据会追加到对应位置下的对应品种处\n time_range_path:放置交易时间文件的路径,包括文件名 如 D:/统一所有品种时间范围.csv\n end_date :'20200103' 今日的日期,用来请求tushare中的交易日历,数据的读取合并都是以交易日历的时间驱动\n commodity_bool:商品期货对应True,金融期货False,默认商品期货\n \"\"\"\n month = year_month\n if commodity_bool:\n file_name = rar_data_file_path + 'FutAC_Min1_Std_' + month + '.rar'\n else:\n file_name = rar_data_file_path + 'FutSF_Min1_Std_' + month + '.rar'\n orignial_path = main_code_path\n specifi_path = orignial_path + contract_kind + '_1day_main.npy'\n rar = rarfile.RarFile(file_name, pwd='www.jinshuyuan.net')\n orignal_clean_csv_path = clean_data_path\n pwd = 'www.jinshuyuan.net'\n data = np.load(specifi_path)\n time_0931_15 = pd.read_csv(time_range_path)['date'].values.tolist()\n rar.extractall(path=file_name.split('.')[0])\n pro = ts.pro_api('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6'\n )\n ts.set_token('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')\n date_df = pro.trade_cal(exchange='DCE', start_date='20100101', end_date\n =end_date)\n date_df = date_df.loc[date_df['is_open'] == 1]\n date_list = date_df['cal_date'].tolist()\n date_df = 
pd.DataFrame({'date': date_list})\n date_df['month'] = date_df['date'].str[:6]\n target_date = date_df.loc[date_df['month'] == month]\n target_date_index = target_date.index.values\n target_date = target_date['date'].values\n data = data.reshape(-1)\n contract_main_pool = data[target_date_index]\n contract_main_pool = (pd.Series(contract_main_pool).str.split('.').str[\n 0] + '.csv').values\n file_pools = os.listdir(file_name.split('.')[0])\n if contract_main_pool[0] not in file_pools:\n contract_main_pool = [contract_file.lower() for contract_file in\n contract_main_pool]\n if contract_main_pool[0] not in file_pools:\n print(f'找不到{contract_main_pool[0]}')\n contract_main_pool = (file_name.split('.')[0] + '/' + pd.Series(\n contract_main_pool)).values\n row_1 = ['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量']\n orignal_data = []\n orignal_data.append(row_1)\n for index in range(len(target_date)):\n date = target_date[index]\n one_file_path = contract_main_pool[index]\n df = pd.read_csv(one_file_path, encoding='gbk')\n df['date'] = df['时间'].str[:10]\n df['date2'] = df['date'].str.replace('-', '')\n result = df.loc[df['date2'] == date]\n if result.shape[0] > 0:\n for row_index in range(len(result)):\n target_row = result.iloc[row_index].tolist()\n clean_row = target_row[:-2]\n orignal_data.append(clean_row)\n print(f'{contract_kind} {date} finished!')\n else:\n print(f'没找到合约品种{contract_kind}在{date}')\n print(f'{contract_kind}在{month}月的主力合约数据读取完成')\n final_df = pd.DataFrame(orignal_data[1:], columns=orignal_data[0])\n final_df['date'] = final_df['时间'].str[:10]\n final_df_date = final_df['date'].unique()\n final_df['date'] = final_df['时间'].str[:10]\n final_df['time'] = final_df['时间'].str[10:].str.strip()\n final_df['时间'] = final_df['date'] + ' ' + final_df['time']\n final_df = final_df.sort_values('时间')\n final_df['合约代码'] = final_df['合约代码'].str.upper()\n final_df = final_df.sort_values('时间')\n final_df['transf_date'] = pd.to_datetime(final_df['date'])\n 
final_df.set_index('transf_date', inplace=True)\n combine_all_df = pd.DataFrame()\n final_df['date2'] = final_df['date'].str.replace('-', '')\n for date_index in range(len(target_date)):\n target_df = final_df.loc[final_df['date2'] == target_date[date_index]]\n target_num = len(target_df)\n theory_num = len(time_0931_15)\n if target_num > 0:\n have_time = target_df['time'].values.tolist()\n lack_time = [x for x in time_0931_15 if x not in have_time]\n if lack_time:\n print(f'{target_date[date_index]} 不连续')\n insert_array = np.empty(shape=(len(lack_time), 12))\n insert_array.fill(np.nan)\n insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码',\n '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])\n insert_df['date'] = target_date[date_index]\n insert_df['time'] = lack_time\n if len(lack_time) < len(time_0931_15):\n insert_df['合约代码'] = target_df['合约代码'].unique()[-1]\n combine_insert_df = pd.concat([target_df, insert_df])\n combine_all_df = pd.concat([combine_all_df, combine_insert_df])\n else:\n print(f'{target_date[date_index]}empty ')\n lack_time = [x for x in time_0931_15]\n insert_array = np.empty(shape=(len(lack_time), 12))\n insert_array.fill(np.nan)\n insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码',\n '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])\n insert_df['date'] = target_date[date_index]\n insert_df['time'] = lack_time\n combine_all_df = pd.concat([combine_all_df, insert_df])\n combine_all_df['时间'] = combine_all_df['date'] + ' ' + combine_all_df['time'\n ]\n combine_all_df = combine_all_df.sort_values('时间')\n combine_all_df.reset_index(inplace=True)\n combine_all_df = combine_all_df[['市场代码', '合约代码', '时间', '开', '高', '低',\n '收', '成交量', '成交额', '持仓量', 'date', 'time']]\n combine_all_df['时间'] = combine_all_df['时间'].str.replace('-', '')\n combine_all_df['date'] = combine_all_df['date'].str.replace('-', '')\n combine_df = combine_all_df.copy()\n contract_type = contract_kind\n combine_df = 
combine_df.sort_values('时间')\n end_time = '15:15:00'\n end_index = np.where(combine_df['time'] == end_time)[0] + 1\n end_index = np.hstack(([0], end_index))\n start = end_index[:-1]\n end = end_index[1:]\n last_day = date_df['date'].iloc[target_date_index[0] - 1]\n last_day = last_day[:4] + '-' + last_day[4:6] + '-' + last_day[6:]\n first_day_have = combine_df[start[0]:end[0]]['time'].values\n full_time = combine_df['time'].unique()\n full_time.sort()\n first_day_lack = [x for x in full_time[-179:]]\n first_day_lack.sort()\n lack_array = np.empty(shape=(len(first_day_lack), 12))\n lack_array.fill(np.nan)\n first_day_lack_df = pd.DataFrame(lack_array, columns=combine_df.columns)\n first_day_lack_df['time'] = first_day_lack\n first_day_lack_df['date'] = last_day\n first_day_lack_df['时间'] = first_day_lack_df['date'\n ] + ' ' + first_day_lack_df['time']\n last_df = pd.read_csv(contract_main_pool[0], encoding='gbk')\n last_df['date'] = last_df['时间'].str[:10]\n last_df['time'] = last_df['时间'].str[11:]\n last_time_pool = last_df.loc[last_df['date'] == last_day]['time'].values\n last_day_have_date = []\n if last_time_pool.shape[0] > 0:\n print(f'期货品种{contract_kind}在前一个交易日{last_day}有夜盘数据,需要读取覆盖')\n last_day_have_date = [x for x in last_time_pool]\n if last_day_have_date:\n for index in range(len(last_day_have_date)):\n origanl_index = last_df.loc[(last_df['date'] == last_day) & (\n last_df['time'] == last_day_have_date[index])].index[0]\n target_index = first_day_lack_df.loc[first_day_lack_df['time'] ==\n last_day_have_date[index]].index[0]\n first_day_lack_df.iloc[target_index] = last_df.iloc[origanl_index]\n else:\n print(f'期货品种{contract_kind}在前一个交易日{last_day}没有夜盘数据,不需要读取覆盖')\n print('直接使用np.nan填充上一个交易日的夜盘数据')\n for index in range(first_day_lack_df.shape[0]):\n combine_df = combine_df.append(first_day_lack_df.iloc[index])\n combine_df['时间'] = combine_df['时间'].str.replace('-', '')\n combine_df['date'] = combine_df['date'].str.replace('-', '')\n combine_df.sort_values('时间', 
inplace=True)\n end_index = np.where(combine_df['time'] == end_time)[0] + 1\n end_index = np.hstack(([0], end_index))\n start = end_index[:-1]\n end = end_index[1:]\n col_type_list = ['开', '高', '低', '收', '成交量', '成交额', '持仓量']\n dir_name_list = ['open', 'high', 'low', 'close', 'volume', 'amount',\n 'position']\n merge_df = pd.DataFrame({'time': time_0931_15})\n combine_df['date'] = combine_df['时间'].str[:8]\n for index in range(len(col_type_list)):\n col_type = col_type_list[index]\n csv_df = pd.DataFrame()\n for s_index, e_index in zip(start, end):\n res = combine_df.iloc[s_index:e_index, :]\n one_date_df = pd.DataFrame(res[col_type].values.reshape(1, -1),\n columns=res['time'].values.tolist())\n one_date_df['main_contract_code'] = res.iloc[-1]['合约代码']\n one_date_df['date'] = res.iloc[-1]['date']\n col_layout = ['date']\n col_layout = np.hstack((col_layout, res['time'].values.tolist()))\n col_layout = np.hstack((col_layout, ['main_contract_code']))\n one_date_df = one_date_df[col_layout]\n csv_df = pd.concat([csv_df, one_date_df])\n orignal_csv_df = pd.read_csv(orignal_clean_csv_path + contract_kind +\n '_1min_' + dir_name_list[index] + '.csv')\n column_ouput_form = orignal_csv_df.columns.values\n orignal_date_pool = pd.to_datetime(orignal_csv_df['date'], format=\n '%Y-%m-%d').values\n current_date_pool = pd.to_datetime(csv_df['date'], format='%Y-%m-%d'\n ).values\n orignal_csv_df['date'] = pd.to_datetime(orignal_csv_df['date'],\n format='%Y-%m-%d').dt.strftime('%Y-%m-%d')\n csv_df['date'] = pd.to_datetime(csv_df['date'], format='%Y%m%d'\n ).dt.strftime('%Y-%m-%d')\n main_code = csv_df['main_contract_code'].iloc[0]\n main_code_num = csv_df['main_contract_code'].str.findall('[0-9]+'\n ).iloc[0][0]\n if len(main_code_num) == 3:\n print(f'合约代码{main_code}缺少一位数字,将被替换')\n csv_df['main_contract_code'] = csv_df['main_contract_code'].str[:2\n ] + month[0] + csv_df['main_contract_code'].str[2:]\n main_code = csv_df['main_contract_code'].iloc[0]\n print(f'合约代码{main_code}')\n 
intersection_pool = [date for date in orignal_date_pool if date in\n current_date_pool]\n if not intersection_pool:\n print(\n f'新旧数据没有时间交集,{contract_kind} {dir_name_list[index]} 将被添加到先前数据中'\n )\n orignal_csv_df = pd.concat([orignal_csv_df, csv_df])\n orignal_csv_df.sort_values('date', inplace=True)\n orignal_csv_df = orignal_csv_df[column_ouput_form]\n orignal_csv_df.to_csv(orignal_clean_csv_path + contract_kind +\n '_1min_' + dir_name_list[index] + '.csv', index=False)\n print(f'期货品种{contract_kind} {dir_name_list[index]} 完成')\n else:\n print(\n f'新旧数据的时间出现交集!!{contract_kind} {dir_name_list[index]} 将不会被添加到先前数据中'\n )\n",
"step-4": "import numpy as np\nimport pandas as pd\nfrom unrar import rarfile\nimport numpy as np\nimport pandas as pd\nimport tushare as ts\nimport os\nyear_month = '201911'\ncontract_kind = 'NI'\nrar_data_file_path = (\n 'C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/'\n )\nmain_code_path = (\n 'C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/main/main_/'\n )\nclean_data_path = 'D:/1_min补充统一/'\nend_date = '20200107'\ntime_range_path = 'D:/统一所有品种时间范围.csv'\n\n\ndef renew_commodity_future(year_month: str, contract_kind: str,\n main_code_path: str, rar_data_file_path: str, clean_data_path: str,\n time_range_path: str, end_date: str, commodity_bool=True):\n \"\"\"\n 用于更新月度的商品期货数据\n year_month:'201911'字符串年份和月份,对应的是FutAC_Min1_Std_后面的数字,如FutAC_Min1_Std_201911\n contract_kind:放对应品种的list 类似['A','B']\n main_code_path:对应存放主力合约的地方\n rar_data_file_path: 对应的是存放rar数据如FutAC_Min1_Std_201911.rar的位置,不包括对应的文件名\n clean_data_path:对应存放分钟数据的位置,处理好的新数据会追加到对应位置下的对应品种处\n time_range_path:放置交易时间文件的路径,包括文件名 如 D:/统一所有品种时间范围.csv\n end_date :'20200103' 今日的日期,用来请求tushare中的交易日历,数据的读取合并都是以交易日历的时间驱动\n commodity_bool:商品期货对应True,金融期货False,默认商品期货\n \"\"\"\n month = year_month\n if commodity_bool:\n file_name = rar_data_file_path + 'FutAC_Min1_Std_' + month + '.rar'\n else:\n file_name = rar_data_file_path + 'FutSF_Min1_Std_' + month + '.rar'\n orignial_path = main_code_path\n specifi_path = orignial_path + contract_kind + '_1day_main.npy'\n rar = rarfile.RarFile(file_name, pwd='www.jinshuyuan.net')\n orignal_clean_csv_path = clean_data_path\n pwd = 'www.jinshuyuan.net'\n data = np.load(specifi_path)\n time_0931_15 = pd.read_csv(time_range_path)['date'].values.tolist()\n rar.extractall(path=file_name.split('.')[0])\n pro = ts.pro_api('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6'\n )\n ts.set_token('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')\n date_df = pro.trade_cal(exchange='DCE', start_date='20100101', end_date\n 
=end_date)\n date_df = date_df.loc[date_df['is_open'] == 1]\n date_list = date_df['cal_date'].tolist()\n date_df = pd.DataFrame({'date': date_list})\n date_df['month'] = date_df['date'].str[:6]\n target_date = date_df.loc[date_df['month'] == month]\n target_date_index = target_date.index.values\n target_date = target_date['date'].values\n data = data.reshape(-1)\n contract_main_pool = data[target_date_index]\n contract_main_pool = (pd.Series(contract_main_pool).str.split('.').str[\n 0] + '.csv').values\n file_pools = os.listdir(file_name.split('.')[0])\n if contract_main_pool[0] not in file_pools:\n contract_main_pool = [contract_file.lower() for contract_file in\n contract_main_pool]\n if contract_main_pool[0] not in file_pools:\n print(f'找不到{contract_main_pool[0]}')\n contract_main_pool = (file_name.split('.')[0] + '/' + pd.Series(\n contract_main_pool)).values\n row_1 = ['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量']\n orignal_data = []\n orignal_data.append(row_1)\n for index in range(len(target_date)):\n date = target_date[index]\n one_file_path = contract_main_pool[index]\n df = pd.read_csv(one_file_path, encoding='gbk')\n df['date'] = df['时间'].str[:10]\n df['date2'] = df['date'].str.replace('-', '')\n result = df.loc[df['date2'] == date]\n if result.shape[0] > 0:\n for row_index in range(len(result)):\n target_row = result.iloc[row_index].tolist()\n clean_row = target_row[:-2]\n orignal_data.append(clean_row)\n print(f'{contract_kind} {date} finished!')\n else:\n print(f'没找到合约品种{contract_kind}在{date}')\n print(f'{contract_kind}在{month}月的主力合约数据读取完成')\n final_df = pd.DataFrame(orignal_data[1:], columns=orignal_data[0])\n final_df['date'] = final_df['时间'].str[:10]\n final_df_date = final_df['date'].unique()\n final_df['date'] = final_df['时间'].str[:10]\n final_df['time'] = final_df['时间'].str[10:].str.strip()\n final_df['时间'] = final_df['date'] + ' ' + final_df['time']\n final_df = final_df.sort_values('时间')\n final_df['合约代码'] = 
final_df['合约代码'].str.upper()\n final_df = final_df.sort_values('时间')\n final_df['transf_date'] = pd.to_datetime(final_df['date'])\n final_df.set_index('transf_date', inplace=True)\n combine_all_df = pd.DataFrame()\n final_df['date2'] = final_df['date'].str.replace('-', '')\n for date_index in range(len(target_date)):\n target_df = final_df.loc[final_df['date2'] == target_date[date_index]]\n target_num = len(target_df)\n theory_num = len(time_0931_15)\n if target_num > 0:\n have_time = target_df['time'].values.tolist()\n lack_time = [x for x in time_0931_15 if x not in have_time]\n if lack_time:\n print(f'{target_date[date_index]} 不连续')\n insert_array = np.empty(shape=(len(lack_time), 12))\n insert_array.fill(np.nan)\n insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码',\n '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])\n insert_df['date'] = target_date[date_index]\n insert_df['time'] = lack_time\n if len(lack_time) < len(time_0931_15):\n insert_df['合约代码'] = target_df['合约代码'].unique()[-1]\n combine_insert_df = pd.concat([target_df, insert_df])\n combine_all_df = pd.concat([combine_all_df, combine_insert_df])\n else:\n print(f'{target_date[date_index]}empty ')\n lack_time = [x for x in time_0931_15]\n insert_array = np.empty(shape=(len(lack_time), 12))\n insert_array.fill(np.nan)\n insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码',\n '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])\n insert_df['date'] = target_date[date_index]\n insert_df['time'] = lack_time\n combine_all_df = pd.concat([combine_all_df, insert_df])\n combine_all_df['时间'] = combine_all_df['date'] + ' ' + combine_all_df['time'\n ]\n combine_all_df = combine_all_df.sort_values('时间')\n combine_all_df.reset_index(inplace=True)\n combine_all_df = combine_all_df[['市场代码', '合约代码', '时间', '开', '高', '低',\n '收', '成交量', '成交额', '持仓量', 'date', 'time']]\n combine_all_df['时间'] = combine_all_df['时间'].str.replace('-', '')\n combine_all_df['date'] = 
combine_all_df['date'].str.replace('-', '')\n combine_df = combine_all_df.copy()\n contract_type = contract_kind\n combine_df = combine_df.sort_values('时间')\n end_time = '15:15:00'\n end_index = np.where(combine_df['time'] == end_time)[0] + 1\n end_index = np.hstack(([0], end_index))\n start = end_index[:-1]\n end = end_index[1:]\n last_day = date_df['date'].iloc[target_date_index[0] - 1]\n last_day = last_day[:4] + '-' + last_day[4:6] + '-' + last_day[6:]\n first_day_have = combine_df[start[0]:end[0]]['time'].values\n full_time = combine_df['time'].unique()\n full_time.sort()\n first_day_lack = [x for x in full_time[-179:]]\n first_day_lack.sort()\n lack_array = np.empty(shape=(len(first_day_lack), 12))\n lack_array.fill(np.nan)\n first_day_lack_df = pd.DataFrame(lack_array, columns=combine_df.columns)\n first_day_lack_df['time'] = first_day_lack\n first_day_lack_df['date'] = last_day\n first_day_lack_df['时间'] = first_day_lack_df['date'\n ] + ' ' + first_day_lack_df['time']\n last_df = pd.read_csv(contract_main_pool[0], encoding='gbk')\n last_df['date'] = last_df['时间'].str[:10]\n last_df['time'] = last_df['时间'].str[11:]\n last_time_pool = last_df.loc[last_df['date'] == last_day]['time'].values\n last_day_have_date = []\n if last_time_pool.shape[0] > 0:\n print(f'期货品种{contract_kind}在前一个交易日{last_day}有夜盘数据,需要读取覆盖')\n last_day_have_date = [x for x in last_time_pool]\n if last_day_have_date:\n for index in range(len(last_day_have_date)):\n origanl_index = last_df.loc[(last_df['date'] == last_day) & (\n last_df['time'] == last_day_have_date[index])].index[0]\n target_index = first_day_lack_df.loc[first_day_lack_df['time'] ==\n last_day_have_date[index]].index[0]\n first_day_lack_df.iloc[target_index] = last_df.iloc[origanl_index]\n else:\n print(f'期货品种{contract_kind}在前一个交易日{last_day}没有夜盘数据,不需要读取覆盖')\n print('直接使用np.nan填充上一个交易日的夜盘数据')\n for index in range(first_day_lack_df.shape[0]):\n combine_df = combine_df.append(first_day_lack_df.iloc[index])\n combine_df['时间'] = 
combine_df['时间'].str.replace('-', '')\n combine_df['date'] = combine_df['date'].str.replace('-', '')\n combine_df.sort_values('时间', inplace=True)\n end_index = np.where(combine_df['time'] == end_time)[0] + 1\n end_index = np.hstack(([0], end_index))\n start = end_index[:-1]\n end = end_index[1:]\n col_type_list = ['开', '高', '低', '收', '成交量', '成交额', '持仓量']\n dir_name_list = ['open', 'high', 'low', 'close', 'volume', 'amount',\n 'position']\n merge_df = pd.DataFrame({'time': time_0931_15})\n combine_df['date'] = combine_df['时间'].str[:8]\n for index in range(len(col_type_list)):\n col_type = col_type_list[index]\n csv_df = pd.DataFrame()\n for s_index, e_index in zip(start, end):\n res = combine_df.iloc[s_index:e_index, :]\n one_date_df = pd.DataFrame(res[col_type].values.reshape(1, -1),\n columns=res['time'].values.tolist())\n one_date_df['main_contract_code'] = res.iloc[-1]['合约代码']\n one_date_df['date'] = res.iloc[-1]['date']\n col_layout = ['date']\n col_layout = np.hstack((col_layout, res['time'].values.tolist()))\n col_layout = np.hstack((col_layout, ['main_contract_code']))\n one_date_df = one_date_df[col_layout]\n csv_df = pd.concat([csv_df, one_date_df])\n orignal_csv_df = pd.read_csv(orignal_clean_csv_path + contract_kind +\n '_1min_' + dir_name_list[index] + '.csv')\n column_ouput_form = orignal_csv_df.columns.values\n orignal_date_pool = pd.to_datetime(orignal_csv_df['date'], format=\n '%Y-%m-%d').values\n current_date_pool = pd.to_datetime(csv_df['date'], format='%Y-%m-%d'\n ).values\n orignal_csv_df['date'] = pd.to_datetime(orignal_csv_df['date'],\n format='%Y-%m-%d').dt.strftime('%Y-%m-%d')\n csv_df['date'] = pd.to_datetime(csv_df['date'], format='%Y%m%d'\n ).dt.strftime('%Y-%m-%d')\n main_code = csv_df['main_contract_code'].iloc[0]\n main_code_num = csv_df['main_contract_code'].str.findall('[0-9]+'\n ).iloc[0][0]\n if len(main_code_num) == 3:\n print(f'合约代码{main_code}缺少一位数字,将被替换')\n csv_df['main_contract_code'] = csv_df['main_contract_code'].str[:2\n ] + 
month[0] + csv_df['main_contract_code'].str[2:]\n main_code = csv_df['main_contract_code'].iloc[0]\n print(f'合约代码{main_code}')\n intersection_pool = [date for date in orignal_date_pool if date in\n current_date_pool]\n if not intersection_pool:\n print(\n f'新旧数据没有时间交集,{contract_kind} {dir_name_list[index]} 将被添加到先前数据中'\n )\n orignal_csv_df = pd.concat([orignal_csv_df, csv_df])\n orignal_csv_df.sort_values('date', inplace=True)\n orignal_csv_df = orignal_csv_df[column_ouput_form]\n orignal_csv_df.to_csv(orignal_clean_csv_path + contract_kind +\n '_1min_' + dir_name_list[index] + '.csv', index=False)\n print(f'期货品种{contract_kind} {dir_name_list[index]} 完成')\n else:\n print(\n f'新旧数据的时间出现交集!!{contract_kind} {dir_name_list[index]} 将不会被添加到先前数据中'\n )\n",
"step-5": "import numpy as np \nimport pandas as pd\nfrom unrar import rarfile\nimport numpy as np \nimport pandas as pd\nimport tushare as ts\nimport os\n\nyear_month='201911'\ncontract_kind='NI'\nrar_data_file_path='C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/'\nmain_code_path='C:/Users/lenovo/Documents/WeChat Files/yiranli13/FileStorage/File/2020-01/main/main_/'\nclean_data_path='D:/1_min补充统一/'\nend_date='20200107'\ntime_range_path='D:/统一所有品种时间范围.csv'\n# save_month_fill_data_path='D:/1_min补充统一/'+contract_kind+'主力连续'+'_'+month+'.csv'\ndef renew_commodity_future(year_month:str,contract_kind:str,main_code_path:str,rar_data_file_path:str,clean_data_path:str,time_range_path:str,end_date:str,commodity_bool=True):\n '''\n 用于更新月度的商品期货数据\n year_month:'201911'字符串年份和月份,对应的是FutAC_Min1_Std_后面的数字,如FutAC_Min1_Std_201911\n contract_kind:放对应品种的list 类似['A','B']\n main_code_path:对应存放主力合约的地方\n rar_data_file_path: 对应的是存放rar数据如FutAC_Min1_Std_201911.rar的位置,不包括对应的文件名\n clean_data_path:对应存放分钟数据的位置,处理好的新数据会追加到对应位置下的对应品种处\n time_range_path:放置交易时间文件的路径,包括文件名 如 D:/统一所有品种时间范围.csv\n end_date :'20200103' 今日的日期,用来请求tushare中的交易日历,数据的读取合并都是以交易日历的时间驱动\n commodity_bool:商品期货对应True,金融期货False,默认商品期货\n '''\n month=year_month\n if commodity_bool: \n file_name=rar_data_file_path+'FutAC_Min1_Std_'+month+'.rar'\n else:\n file_name=rar_data_file_path+'FutSF_Min1_Std_'+month+'.rar'\n orignial_path=main_code_path\n specifi_path=orignial_path+contract_kind+'_1day_main.npy'\n rar = rarfile.RarFile(file_name,pwd='www.jinshuyuan.net')\n # 原始的处理好的数据\n orignal_clean_csv_path=clean_data_path\n pwd='www.jinshuyuan.net'\n data=np.load(specifi_path)\n time_0931_15=pd.read_csv(time_range_path)['date'].values.tolist()\n rar.extractall(path=file_name.split('.')[0])\n # 首先需要输入end_date 确保截取的时间长度和main主力合约的时间对齐\n # 按照月份确定位置\n pro = ts.pro_api('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')\n ts.set_token('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')\n 
date_df=pro.trade_cal(exchange='DCE', start_date='20100101', end_date=end_date)\n date_df=date_df.loc[date_df['is_open']==1]\n date_list=date_df['cal_date'].tolist()\n # ==========================================================================\n # 针对的是201911月数据,对应的合约index 放在 target_date_index中\n date_df=pd.DataFrame({'date':date_list})\n date_df['month']=date_df['date'].str[:6]\n target_date=date_df.loc[date_df['month']==month]\n target_date_index=target_date.index.values\n target_date=target_date['date'].values\n # 获取对应目标\n data=data.reshape(-1)\n contract_main_pool=data[target_date_index]\n # 去掉交易所的代码编号\n contract_main_pool=(pd.Series(contract_main_pool).str.split('.').str[0]+'.csv').values\n file_pools=os.listdir(file_name.split('.')[0])\n # 郑州期货交易所是大写,其它都是小写,这里需要逻辑判断\n if contract_main_pool[0] not in file_pools:\n contract_main_pool=[contract_file.lower() for contract_file in contract_main_pool]\n if contract_main_pool[0] not in file_pools:\n print(f'找不到{contract_main_pool[0]}')\n # 读取好所有的路径\n contract_main_pool=(file_name.split('.')[0]+'/'+pd.Series(contract_main_pool)).values\n # (len(target_date),contract_main_pool.shape[0])\n row_1=['市场代码','合约代码',\t'时间',\t'开','高',\t'低',\t'收',\t'成交量',\t'成交额',\t'持仓量']\n orignal_data=[]\n orignal_data.append(row_1)\n for index in range(len(target_date)):\n date=target_date[index]\n one_file_path=contract_main_pool[index]\n df=pd.read_csv(one_file_path,encoding='gbk')\n df['date']=df['时间'].str[:10]\n df['date2']=df['date'].str.replace('-','')\n result=df.loc[df['date2']==date]\n if result.shape[0]>0:\n for row_index in range(len(result)):\n target_row=result.iloc[row_index].tolist()\n clean_row=target_row[:-2]\n orignal_data.append(clean_row)\n print(f'{contract_kind} {date} finished!')\n else:\n print(f'没找到合约品种{contract_kind}在{date}')\n print(f'{contract_kind}在{month}月的主力合约数据读取完成')\n final_df=pd.DataFrame(orignal_data[1:],columns=orignal_data[0])\n\n final_df['date']=final_df['时间'].str[:10]\n 
final_df_date=final_df['date'].unique()\n\n final_df['date']=final_df['时间'].str[:10]\n final_df['time']=final_df['时间'].str[10:].str.strip()\n final_df['时间']=final_df['date']+' '+final_df['time']\n final_df=final_df.sort_values('时间')\n final_df['合约代码']=final_df['合约代码'].str.upper()\n final_df=final_df.sort_values('时间')\n # ===============================增加了从constant_time进行截取================================\n final_df['transf_date']=pd.to_datetime(final_df['date'])\n final_df.set_index('transf_date',inplace=True)\n combine_all_df=pd.DataFrame()\n final_df['date2']=final_df['date'].str.replace('-','')\n # 按月进行填充\n # 设置了存放按月填充的路径\n for date_index in range(len(target_date)):\n\n #按日期进行分割\n target_df=final_df.loc[final_df['date2']==target_date[date_index]]\n #分割到的长度放入容器中\n target_num=len(target_df)\n #理论长度\n theory_num=len(time_0931_15)\n #实际上两种情况:1.是交易日但完全没有数据2.是交易日,只有部分数据 3.是交易日,数据也是完整的\n if target_num>0:\n #开始区分2,3情况\n have_time=target_df['time'].values.tolist()\n lack_time=[x for x in time_0931_15 if x not in have_time]\n #检查是不是情况2\n if lack_time:\n print(f'{target_date[date_index]} 不连续')\n #一共12列,先全部填充nan的时候,最后再把已知填入\n insert_array=np.empty(shape=(len(lack_time),12))\n insert_array.fill(np.nan)\n insert_df=pd.DataFrame(insert_array,columns=['市场代码','合约代码','时间','开','高','低','收','成交量','成交额','持仓量','date','time'])\n insert_df['date']=target_date[date_index]\n insert_df['time']=lack_time\n #缺少时间的个数小于time_0931_15则说明,当天并不是完全没数据,只是部分数据缺失,因此要对合约代码进行填充\n if len(lack_time)<len(time_0931_15):\n \n insert_df['合约代码']=target_df['合约代码'].unique()[-1]\n #生成一天完整的数据\n combine_insert_df=pd.concat([target_df,insert_df])\n #将数据添加到容器中\n combine_all_df=pd.concat([combine_all_df,combine_insert_df]) \n \n #完全没有数据,直接填充 \n else:\n print(f'{target_date[date_index]}empty ')\n lack_time=[x for x in time_0931_15]\n #一共12列,先全部填充nan的时候,最后再把已知填入\n insert_array=np.empty(shape=(len(lack_time),12))\n insert_array.fill(np.nan)\n 
insert_df=pd.DataFrame(insert_array,columns=['市场代码','合约代码','时间','开','高','低','收','成交量','成交额','持仓量','date','time'])\n insert_df['date']=target_date[date_index]\n insert_df['time']=lack_time\n #将数据添加到容器\n combine_all_df=pd.concat([combine_all_df,insert_df])\n combine_all_df['时间']=combine_all_df['date']+' '+combine_all_df['time']\n #调整时间\n combine_all_df=combine_all_df.sort_values('时间')\n\n combine_all_df.reset_index(inplace=True)\n #数据输出,按设定的顺序\n combine_all_df=combine_all_df[['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量','date','time']]\n combine_all_df['时间']=combine_all_df['时间'].str.replace('-','')\n combine_all_df['date']=combine_all_df['date'].str.replace('-','')\n # combine_all_df.to_csv(save_month_fill_data_path,index=False,encoding='utf-8-sig')\n # ==========================储存数据=================================================\n combine_df=combine_all_df.copy()\n contract_type=contract_kind\n combine_df=combine_df.sort_values('时间')\n # ====================================================================开始截取============================================================\n # end_time+1其实是可以作为每次截取的起点,终点下一个就是起点,不过要加上0,而终点的位置也可以是end_time+1,因为end_time+1只能取end_time\n # 按照下午15:15统一截取\n end_time='15:15:00'\n end_index=np.where(combine_df['time']==end_time)[0]+1\n end_index=np.hstack(([0],end_index))\n start=end_index[:-1]\n end=end_index[1:]\n # ================================================================缺失第一个交易日前一天的夜盘数据==========================================\n # 这里的选择构造一个虚拟的时间戳,来满足缺失的夜盘数据\n # 按照上一步的截取方法,第一个交易日缺少前一天的夜盘数据\n last_day=date_df['date'].iloc[target_date_index[0]-1]\n last_day=last_day[:4]+'-'+last_day[4:6]+'-'+last_day[6:]\n first_day_have=combine_df[start[0]:end[0]]['time'].values\n full_time=combine_df['time'].unique()\n full_time.sort()\n first_day_lack=[x for x in full_time[-179:]]\n first_day_lack.sort()\n lack_array=np.empty(shape=(len(first_day_lack),12))\n lack_array.fill(np.nan)\n # 
===============================准备缺失部分df==========================================================================================\n first_day_lack_df=pd.DataFrame(lack_array,columns=combine_df.columns)\n first_day_lack_df['time']=first_day_lack\n first_day_lack_df['date']=last_day\n first_day_lack_df['时间']=first_day_lack_df['date']+' '+first_day_lack_df['time']\n\n last_df=pd.read_csv(contract_main_pool[0],encoding='gbk')\n # 确定之前的有没有夜盘\n last_df['date']=last_df['时间'].str[:10]\n last_df['time']=last_df['时间'].str[11:]\n # 补夜盘数据\n last_time_pool=last_df.loc[last_df['date']==last_day]['time'].values\n\n last_day_have_date=[]\n # 说明在上个交易日有数据\n if last_time_pool.shape[0]>0:\n \n print(f'期货品种{contract_kind}在前一个交易日{last_day}有夜盘数据,需要读取覆盖')\n last_day_have_date=[x for x in last_time_pool]\n if last_day_have_date:\n for index in range(len(last_day_have_date)):\n origanl_index=last_df.loc[(last_df['date']==last_day)&(last_df['time']==last_day_have_date[index])].index[0]\n target_index=first_day_lack_df.loc[first_day_lack_df['time']==last_day_have_date[index]].index[0]\n first_day_lack_df.iloc[target_index]=last_df.iloc[origanl_index]\n else:\n print(f'期货品种{contract_kind}在前一个交易日{last_day}没有夜盘数据,不需要读取覆盖')\n print('直接使用np.nan填充上一个交易日的夜盘数据')\n for index in range(first_day_lack_df.shape[0]):\n combine_df=combine_df.append(first_day_lack_df.iloc[index])\n combine_df['时间']=combine_df['时间'].str.replace('-','')\n combine_df['date']=combine_df['date'].str.replace('-','')\n combine_df.sort_values('时间',inplace=True)\n # =================================缺失部分填充=========================================================================================\n # combine_df=pd.concat([first_day_lack_df,combine_df])\n # # ================================重新按时间排序========================================================================================\n # combine_df=combine_df.sort_values('时间')\n # 
============================重新进行切割===============================================================================================\n end_index=np.where(combine_df['time']==end_time)[0]+1\n end_index=np.hstack(([0],end_index))\n start=end_index[:-1]\n end=end_index[1:]\n\n # ==============================进行分割按照特定时间,明确col===============================================================================\n\n col_type_list=['开','高','低','收','成交量','成交额','持仓量']\n dir_name_list=['open','high','low','close','volume','amount','position']\n #这个变量现在没有用\n #交易到凌晨01\n #merge_df=pd.DataFrame({'time':with_night_01})\n #交易到凌晨0230,version中没有集合竞价时间,time_0931_15去掉9:00,21:00\n merge_df=pd.DataFrame({'time':time_0931_15})\n\n combine_df['date']=combine_df['时间'].str[:8]\n for index in range(len(col_type_list)):\n\n col_type=col_type_list[index]\n # 用来接收分col数据的容器\n csv_df=pd.DataFrame()\n for s_index,e_index in zip(start,end):\n\n # =========================================截取每个交易日数据==============================================================================\n res=combine_df.iloc[s_index:e_index,:]\n one_date_df=pd.DataFrame(res[col_type].values.reshape(1,-1),columns=res['time'].values.tolist())\n one_date_df['main_contract_code']=res.iloc[-1]['合约代码']\n one_date_df['date']=res.iloc[-1]['date']\n # =======================================设置输出格式====================================================================================\n\n col_layout=['date']\n col_layout=np.hstack((col_layout,res['time'].values.tolist()))\n col_layout=np.hstack((col_layout,['main_contract_code']))\n one_date_df=one_date_df[col_layout]\n # =======================================合并数据========================================================================================\n csv_df=pd.concat([csv_df,one_date_df])\n # ========================追加原始数据=======================================\n # 时间问题需要处理,不然对不齐\n # 在测试文件中测试,所以修改了路径\n 
orignal_csv_df=pd.read_csv(orignal_clean_csv_path+contract_kind+'_1min_'+dir_name_list[index]+'.csv')\n column_ouput_form=orignal_csv_df.columns.values\n orignal_date_pool=pd.to_datetime(orignal_csv_df['date'],format='%Y-%m-%d').values\n current_date_pool=pd.to_datetime(csv_df['date'],format='%Y-%m-%d').values\n orignal_csv_df['date']=pd.to_datetime(orignal_csv_df['date'],format='%Y-%m-%d').dt.strftime('%Y-%m-%d')\n csv_df['date']=pd.to_datetime(csv_df['date'],format='%Y%m%d').dt.strftime('%Y-%m-%d')\n # check代码中的数字个数等于四个\n main_code=csv_df['main_contract_code'].iloc[0]\n main_code_num=csv_df['main_contract_code'].str.findall(r'[0-9]+').iloc[0][0]\n if len(main_code_num)==3:\n print(f'合约代码{main_code}缺少一位数字,将被替换')\n csv_df['main_contract_code']=csv_df['main_contract_code'].str[:2]+month[0]+csv_df['main_contract_code'].str[2:]\n main_code=csv_df['main_contract_code'].iloc[0]\n print(f'合约代码{main_code}')\n # 查看有没有交集,如果有交集会停止,说明进行了重复操作\n \n intersection_pool=[date for date in orignal_date_pool if date in current_date_pool]\n if not intersection_pool:\n print(f'新旧数据没有时间交集,{contract_kind} {dir_name_list[index]} 将被添加到先前数据中')\n orignal_csv_df=pd.concat([orignal_csv_df,csv_df]) \n orignal_csv_df.sort_values('date',inplace=True)\n orignal_csv_df=orignal_csv_df[column_ouput_form]\n orignal_csv_df.to_csv(orignal_clean_csv_path+contract_kind+'_1min_'+dir_name_list[index]+'.csv',index=False)\n print(f'期货品种{contract_kind} {dir_name_list[index]} 完成')\n else:\n print(f'新旧数据的时间出现交集!!{contract_kind} {dir_name_list[index]} 将不会被添加到先前数据中')\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
'''
Задание 12.3
Создать функцию print_ip_table, которая отображает таблицу доступных и недоступных IP-адресов.
Функция ожидает как аргументы два списка:
* список доступных IP-адресов
* список недоступных IP-адресов
Результат работы функции - вывод на стандартный поток вывода таблицы вида:
Reachable Unreachable
----------- -------------
10.1.1.1 10.1.1.7
10.1.1.2 10.1.1.8
10.1.1.9
Функция не должна изменять списки, которые переданы ей как аргументы.
То есть, до выполнения функции и после списки должны выглядеть одинаково.
Для этого задания нет тестов
'''
import subprocess
import ipaddress
from tabulate import tabulate
def ping_ip_addresses(ip_addresses):
    """Ping every address and split the input into reachable/unreachable.

    Returns a tuple ``(reachable, unreachable)`` of IP-address strings,
    preserving the order in which the addresses were given.
    """
    reachable = []
    unreachable = []
    for address in ip_addresses:
        # Three probes per host; '-n' keeps output numeric (no reverse DNS).
        ping = subprocess.run(
            ['ping', '-c', '3', '-n', address],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            encoding='utf-8',
        )
        # Exit status 0 means at least one echo reply came back.
        if ping.returncode == 0:
            reachable.append(address)
        else:
            unreachable.append(address)
    return (reachable, unreachable)
def convert_ranges_to_ip_list(list_of_ip_addresses):
    """Expand single addresses and address ranges into a flat list of strings.

    Each item is either one address ('10.1.1.1') or an inclusive range
    'start-end', where *end* may be a full dotted address
    ('10.1.1.1-10.1.2.5') or just the final octet ('10.1.1.1-30').

    Fixes over the previous version: ranges may now cross octet boundaries
    (previously only the last octets were compared, so e.g.
    '10.0.0.250-10.0.1.5' yielded nothing), and a multi-digit short-form
    end octet like '-30' no longer raises IndexError.
    """
    result = []
    for item in list_of_ip_addresses:
        start_str, sep, end_str = item.partition('-')
        start = ipaddress.ip_address(start_str)
        if not sep:
            # Plain single address.
            result.append(str(start))
            continue
        if '.' in end_str:
            end = ipaddress.ip_address(end_str)
        else:
            # Shorthand: only the last octet of the end address is given;
            # reuse the first three octets of the start address.
            prefix = start_str.rsplit('.', 1)[0]
            end = ipaddress.ip_address(prefix + '.' + end_str)
        # Step by integer value so the range can span octet boundaries.
        current = start
        while current <= end:
            result.append(str(current))
            current += 1
    return result
# Driver: ping one literal address plus an entire range and print the
# reachable/unreachable split via tabulate.
# NOTE(review): tabulate receives a tuple of two lists, which it treats as
# two table *rows*, not two columns — verify the output actually lines up
# under the 'Reachable' / 'Unreachable' headers as intended.
columns = ['Reachable', 'Unreachable']
sh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4', '172.19.30.1-172.19.30.254']))
print(tabulate(sh_ip, headers=columns))
|
normal
|
{
"blob_id": "dd7e8556405f07172ce2b1e9f486c2cd2f4bad58",
"index": 7613,
"step-1": "<mask token>\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\n<mask token>\nprint(tabulate(sh_ip, headers=columns))\n",
"step-3": "<mask token>\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\ncolumns = ['Reachable', 'Unreachable']\nsh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4',\n '172.19.30.1-172.19.30.254']))\nprint(tabulate(sh_ip, headers=columns))\n",
"step-4": "<mask token>\nimport subprocess\nimport ipaddress\nfrom tabulate import tabulate\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\ncolumns = ['Reachable', 'Unreachable']\nsh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4',\n '172.19.30.1-172.19.30.254']))\nprint(tabulate(sh_ip, headers=columns))\n",
"step-5": "# -*- coding: utf-8 -*-\n'''\nЗадание 12.3\n\n\nСоздать функцию print_ip_table, которая отображает таблицу доступных и недоступных IP-адресов.\n\nФункция ожидает как аргументы два списка:\n* список доступных IP-адресов\n* список недоступных IP-адресов\n\nРезультат работы функции - вывод на стандартный поток вывода таблицы вида:\n\nReachable Unreachable\n----------- -------------\n10.1.1.1 10.1.1.7\n10.1.1.2 10.1.1.8\n 10.1.1.9\n\nФункция не должна изменять списки, которые переданы ей как аргументы.\nТо есть, до выполнения функции и после списки должны выглядеть одинаково.\n\n\nДля этого задания нет тестов\n'''\nimport subprocess\nimport ipaddress\nfrom tabulate import tabulate\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1,result2])\n return result\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result=[]\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k+1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\ncolumns = ['Reachable', 'Unreachable']\nsh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4', '172.19.30.1-172.19.30.254']))\nprint(tabulate(sh_ip, headers=columns))\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class StockType(Base, BaseModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __json__(self, _):
return {'id': self.id, 'name': self.name}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StockType(Base, BaseModel):
__tablename__ = 'stock_type'
name = Column(String(255), nullable=False)
stocks = relationship('Stock', back_ref='stock_type')
user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
user = relationship('User')
def __json__(self, _):
return {'id': self.id, 'name': self.name}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Stock(Base, BaseModel):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __json__(self, _):
return {'id': self.id, 'name': self.name, 'starting_price': self.
starting_price, 'current_price': self.current_price,
'max_price': self.max_price, 'min_price': self.min_price,
'starting_stock': self.starting_stock, 'current_stock': self.
current_stock}
class StockType(Base, BaseModel):
__tablename__ = 'stock_type'
name = Column(String(255), nullable=False)
stocks = relationship('Stock', back_ref='stock_type')
user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
user = relationship('User')
def __json__(self, _):
return {'id': self.id, 'name': self.name}
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Stock(Base, BaseModel):
__tablename__ = 'stock'
name = Column(String(255), nullable=False)
starting_price = Column(Float, nullable=False)
current_price = Column(Float, nullable=False)
max_price = Column(Float, nullable=True)
min_price = Column(Float, nullable=True)
starting_stock = Column(Integer, nullable=True)
current_stock = Column(Integer, nullable=True)
stock_type_id = Column(UUID(as_uuid=True), ForeignKey('stock_type.id'))
stock_type = relationship('StockType', back_ref='stocks')
user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
user = relationship('User')
def __json__(self, _):
return {'id': self.id, 'name': self.name, 'starting_price': self.
starting_price, 'current_price': self.current_price,
'max_price': self.max_price, 'min_price': self.min_price,
'starting_stock': self.starting_stock, 'current_stock': self.
current_stock}
class StockType(Base, BaseModel):
__tablename__ = 'stock_type'
name = Column(String(255), nullable=False)
stocks = relationship('Stock', back_ref='stock_type')
user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
user = relationship('User')
def __json__(self, _):
return {'id': self.id, 'name': self.name}
<|reserved_special_token_1|>
from sqlalchemy import (Column, Integer, Float, String, ForeignKey)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
from .meta import Base, BaseModel
class Stock(Base, BaseModel):
    """A tradeable stock (price and inventory levels), owned by a user."""
    __tablename__ = 'stock'

    name = Column(String(255), nullable=False)
    starting_price = Column(Float, nullable=False)
    current_price = Column(Float, nullable=False)
    # Optional bounds/levels; NULL means not set.
    max_price = Column(Float, nullable=True)
    min_price = Column(Float, nullable=True)
    starting_stock = Column(Integer, nullable=True)
    current_stock = Column(Integer, nullable=True)

    stock_type_id = Column(UUID(as_uuid=True), ForeignKey('stock_type.id'))
    # Fix: 'back_ref' is not a valid relationship() argument (TypeError at
    # mapper configuration). Pair the two sides explicitly with
    # back_populates; StockType.stocks is the complementary relationship.
    stock_type = relationship('StockType', back_populates='stocks')

    user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
    user = relationship('User')

    def __json__(self, _):
        """Serialize for JSON responses (relationships deliberately omitted)."""
        return {
            "id": self.id,
            "name": self.name,
            "starting_price": self.starting_price,
            "current_price": self.current_price,
            "max_price": self.max_price,
            "min_price": self.min_price,
            "starting_stock": self.starting_stock,
            "current_stock": self.current_stock
        }
class StockType(Base, BaseModel):
    """A user-defined category grouping related Stock rows."""
    __tablename__ = 'stock_type'

    name = Column(String(255), nullable=False)
    # Fix: 'back_ref' is not a valid relationship() argument (TypeError at
    # mapper configuration). Use back_populates, paired with
    # Stock.stock_type on the other side.
    stocks = relationship('Stock', back_populates='stock_type')

    user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))
    user = relationship('User')

    def __json__(self, _):
        """Serialize for JSON responses (member stocks deliberately omitted)."""
        return {
            "id": self.id,
            "name": self.name
        }
|
flexible
|
{
"blob_id": "7251d32918b16166e9b7c9613726e6dc51d6fea4",
"index": 3834,
"step-1": "<mask token>\n\n\nclass StockType(Base, BaseModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-2": "<mask token>\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-3": "<mask token>\n\n\nclass Stock(Base, BaseModel):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name, 'starting_price': self.\n starting_price, 'current_price': self.current_price,\n 'max_price': self.max_price, 'min_price': self.min_price,\n 'starting_stock': self.starting_stock, 'current_stock': self.\n current_stock}\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-4": "<mask token>\n\n\nclass Stock(Base, BaseModel):\n __tablename__ = 'stock'\n name = Column(String(255), nullable=False)\n starting_price = Column(Float, nullable=False)\n current_price = Column(Float, nullable=False)\n max_price = Column(Float, nullable=True)\n min_price = Column(Float, nullable=True)\n starting_stock = Column(Integer, nullable=True)\n current_stock = Column(Integer, nullable=True)\n stock_type_id = Column(UUID(as_uuid=True), ForeignKey('stock_type.id'))\n stock_type = relationship('StockType', back_ref='stocks')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name, 'starting_price': self.\n starting_price, 'current_price': self.current_price,\n 'max_price': self.max_price, 'min_price': self.min_price,\n 'starting_stock': self.starting_stock, 'current_stock': self.\n current_stock}\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {'id': self.id, 'name': self.name}\n",
"step-5": "from sqlalchemy import (Column, Integer, Float, String, ForeignKey)\nfrom sqlalchemy.dialects.postgresql import UUID\nfrom sqlalchemy.orm import relationship\n\nfrom .meta import Base, BaseModel\n\n\nclass Stock(Base, BaseModel):\n __tablename__ = 'stock'\n\n name = Column(String(255), nullable=False)\n starting_price = Column(Float, nullable=False)\n current_price = Column(Float, nullable=False)\n max_price = Column(Float, nullable=True)\n min_price = Column(Float, nullable=True)\n starting_stock = Column(Integer, nullable=True)\n current_stock = Column(Integer, nullable=True)\n\n stock_type_id = Column(UUID(as_uuid=True), ForeignKey('stock_type.id'))\n stock_type = relationship('StockType', back_ref='stocks')\n\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {\n \"id\": self.id,\n \"name\": self.name,\n \"starting_price\": self.starting_price,\n \"current_price\": self.current_price,\n \"max_price\": self.max_price,\n \"min_price\": self.min_price,\n \"starting_stock\": self.starting_stock,\n \"current_stock\": self.current_stock\n }\n\n\nclass StockType(Base, BaseModel):\n __tablename__ = 'stock_type'\n\n name = Column(String(255), nullable=False)\n stocks = relationship('Stock', back_ref='stock_type')\n\n user_id = Column(UUID(as_uuid=True), ForeignKey('user.id'))\n user = relationship('User')\n\n def __json__(self, _):\n return {\n \"id\": self.id,\n \"name\": self.name\n }\n",
"step-ids": [
2,
3,
5,
6,
8
]
}
|
[
2,
3,
5,
6,
8
] |
<|reserved_special_token_0|>
def compute_target_weights(context, data):
"""
Compute ordering weights.
"""
weights = {}
if context.longs:
long_weight = 0.5 / len(context.longs)
if context.shorts:
short_weight = -0.5 / len(context.shorts)
for security in context.portfolio.positions:
if (security not in context.longs and security not in context.
shorts and data.can_trade(security)):
weights[security] = 0
for security in context.longs:
weights[security] = long_weight
for security in context.shorts:
weights[security] = short_weight
return weights
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def initialize(context):
schedule_function(my_rebalance, date_rules.every_day(), time_rules.
market_close(minutes=1))
my_pipe = make_pipeline()
attach_pipeline(my_pipe, 'my_pipeline')
<|reserved_special_token_0|>
def compute_target_weights(context, data):
"""
Compute ordering weights.
"""
weights = {}
if context.longs:
long_weight = 0.5 / len(context.longs)
if context.shorts:
short_weight = -0.5 / len(context.shorts)
for security in context.portfolio.positions:
if (security not in context.longs and security not in context.
shorts and data.can_trade(security)):
weights[security] = 0
for security in context.longs:
weights[security] = long_weight
for security in context.shorts:
weights[security] = short_weight
return weights
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def initialize(context):
schedule_function(my_rebalance, date_rules.every_day(), time_rules.
market_close(minutes=1))
my_pipe = make_pipeline()
attach_pipeline(my_pipe, 'my_pipeline')
<|reserved_special_token_0|>
def compute_target_weights(context, data):
"""
Compute ordering weights.
"""
weights = {}
if context.longs:
long_weight = 0.5 / len(context.longs)
if context.shorts:
short_weight = -0.5 / len(context.shorts)
for security in context.portfolio.positions:
if (security not in context.longs and security not in context.
shorts and data.can_trade(security)):
weights[security] = 0
for security in context.longs:
weights[security] = long_weight
for security in context.shorts:
weights[security] = short_weight
return weights
def before_trading_start(context, data):
"""
Get pipeline results.
"""
pipe_results = pipeline_output('my_pipeline')
context.longs = []
for sec in pipe_results[pipe_results['longs']].index.tolist():
if data.can_trade(sec):
context.longs.append(sec)
context.shorts = []
for sec in pipe_results[pipe_results['shorts']].index.tolist():
if data.can_trade(sec):
context.shorts.append(sec)
def my_rebalance(context, data):
"""
Rebalance daily
"""
for stock in context.portfolio.positions:
order_target_percent(stock, 0.0)
target_weights = compute_target_weights(context, data)
if target_weights:
order_optimal_portfolio(objective=opt.TargetWeights(target_weights),
constraints=[])
<|reserved_special_token_1|>
from quantopian.algorithm import order_optimal_portfolio
from quantopian.algorithm import attach_pipeline, pipeline_output
from quantopian.pipeline import Pipeline
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline.factors import SimpleMovingAverage
from quantopian.pipeline.filters import QTradableStocksUS
import quantopian.optimize as opt
from quantopian.pipeline.factors import Returns
def initialize(context):
    """Algorithm entry point: schedule the daily rebalance and attach the pipeline."""
    # Rebalance once per trading day, one minute before the market close.
    schedule_function(my_rebalance, date_rules.every_day(), time_rules.
        market_close(minutes=1))
    # Build the stock-selection pipeline and register it under a name that
    # before_trading_start() uses to fetch its output.
    my_pipe = make_pipeline()
    attach_pipeline(my_pipe, 'my_pipeline')
def make_pipeline():
    """Build a pipeline flagging one long and one short candidate per day.

    bottom(1) marks yesterday's single worst performer as the long and
    top(1) marks the single best performer as the short — a contrarian
    rotation, presumably betting on one-day mean reversion (confirm intent
    with the strategy owner).
    """
    # Returns(window_length=2) is the 1-day return over the tradeable universe.
    longs = Returns(window_length=2).bottom(1, mask=QTradableStocksUS())
    shorts = Returns(window_length=2).top(1, mask=QTradableStocksUS())
    # Screen keeps only the flagged securities (plus the universe filter).
    return Pipeline(columns={'longs': longs, 'shorts': shorts}, screen=
        QTradableStocksUS() & (shorts | longs))
def compute_target_weights(context, data):
    """Map securities to target portfolio weights.

    Half the book is split evenly across the longs (+0.5 total) and half
    across the shorts (-0.5 total); tradeable positions no longer on either
    list are assigned weight 0 so they get closed out.
    """
    target = {}
    longs, shorts = context.longs, context.shorts

    # Zero out anything held that is no longer a candidate (and can trade).
    for held in context.portfolio.positions:
        if held not in longs and held not in shorts and data.can_trade(held):
            target[held] = 0

    if longs:
        per_long = 0.5 / len(longs)
        for sec in longs:
            target[sec] = per_long
    if shorts:
        per_short = -0.5 / len(shorts)
        for sec in shorts:
            target[sec] = per_short

    return target
def before_trading_start(context, data):
    """Fetch today's pipeline output and cache the tradeable candidates.

    Stores the long and short candidate lists on ``context`` for use by
    the rebalance step later in the day.
    """
    results = pipeline_output('my_pipeline')

    # Keep only securities that can actually be traded right now.
    context.longs = [
        sec for sec in results[results['longs']].index.tolist()
        if data.can_trade(sec)
    ]
    context.shorts = [
        sec for sec in results[results['shorts']].index.tolist()
        if data.can_trade(sec)
    ]
def my_rebalance(context, data):
    """Daily rebalance: flatten the book, then re-establish target weights."""
    # Explicitly close every existing position first; the optimizer order
    # below then rebuilds the new long/short book from scratch.
    for stock in context.portfolio.positions:
        order_target_percent(stock, 0.0)
    target_weights = compute_target_weights(context, data)
    # Only submit an optimizer order when there is actually something to trade.
    if target_weights:
        order_optimal_portfolio(objective=opt.TargetWeights(target_weights),
            constraints=[])
<|reserved_special_token_1|>
from quantopian.algorithm import order_optimal_portfolio
from quantopian.algorithm import attach_pipeline, pipeline_output
from quantopian.pipeline import Pipeline
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline.factors import SimpleMovingAverage
from quantopian.pipeline.filters import QTradableStocksUS
import quantopian.optimize as opt
from quantopian.pipeline.factors import Returns
def initialize(context):
# Schedule our rebalance function to run at the end of
# each day, when the market closes
#set_slippage(slippage.FixedSlippage(spread=0.0, volume_limit=1))
#set_slippage(slippage.FixedBasisPointsSlippage(basis_points=0, volume_limit=100))
#set_slippage(slippage.VolumeShareSlippage(0))
schedule_function(
my_rebalance,
date_rules.every_day(),
time_rules.market_close(minutes=1 )
)
# Create our pipeline and attach it to our algorithm.
my_pipe = make_pipeline()
attach_pipeline(my_pipe, 'my_pipeline')
def make_pipeline():
#longs = Returns(window_length=2).percentile_between(0,20,mask=QTradableStocksUS())
#shorts = Returns(window_length=2).percentile_between(80,100,mask=QTradableStocksUS())
longs = Returns(window_length=2).bottom(1,mask=QTradableStocksUS())
shorts = Returns(window_length=2).top(1,mask=QTradableStocksUS())
return Pipeline(
columns={
'longs': longs,
'shorts': shorts,
},
screen=QTradableStocksUS()& (shorts | longs)
)
def compute_target_weights(context, data):
"""
Compute ordering weights.
"""
# Initialize empty target weights dictionary.
# This will map securities to their target weight.
weights = {}
# If there are securities in our longs and shorts lists,
# compute even target weights for each security.
if context.longs :
long_weight = 0.5 / len(context.longs)
if context.shorts:
short_weight = -0.5 / len(context.shorts)
#if ~(context.longs & context.shorts):
# return weights
# Exit positions in our portfolio if they are not
# in our longs or shorts lists.
for security in context.portfolio.positions:
if security not in context.longs and security not in context.shorts and data.can_trade(security):
weights[security] = 0
for security in context.longs:
weights[security] = long_weight
for security in context.shorts:
weights[security] = short_weight
return weights
def before_trading_start(context, data):
"""
Get pipeline results.
"""
# Gets our pipeline output every day.
pipe_results = pipeline_output('my_pipeline')
# Go long in securities for which the 'longs' value is True,
# and check if they can be traded.
context.longs = []
for sec in pipe_results[pipe_results['longs']].index.tolist():
if data.can_trade(sec):
context.longs.append(sec)
#print(context.longs)
#print('Longs: ')
#print(context.longs)
# Go short in securities for which the 'shorts' value is True,
# and check if they can be traded.
context.shorts = []
for sec in pipe_results[pipe_results['shorts']].index.tolist():
if data.can_trade(sec):
context.shorts.append(sec)
#print('Shorts: ')
#print(context.shorts)
def my_rebalance(context, data):
"""
Rebalance daily
"""
for stock in context.portfolio.positions:
#print('selling everything')
#print(stock)
order_target_percent(stock, 0.0)
# Calculate target weights to rebalance
#print(context)
target_weights = compute_target_weights(context, data)
#print(target_weights)
# If we have target weights, rebalance our portfolio
if target_weights:
order_optimal_portfolio(
objective=opt.TargetWeights(target_weights),
constraints=[],
)
|
flexible
|
{
"blob_id": "c447d1fe38a4af43de39e05d46dacbe88249d427",
"index": 3654,
"step-1": "<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n pipe_results = pipeline_output('my_pipeline')\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n\n\ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in context.portfolio.positions:\n order_target_percent(stock, 0.0)\n target_weights = compute_target_weights(context, data)\n if target_weights:\n order_optimal_portfolio(objective=opt.TargetWeights(target_weights),\n constraints=[])\n",
"step-4": "from quantopian.algorithm import order_optimal_portfolio\nfrom quantopian.algorithm import attach_pipeline, pipeline_output\nfrom quantopian.pipeline import Pipeline\nfrom quantopian.pipeline.data.builtin import USEquityPricing\nfrom quantopian.pipeline.factors import SimpleMovingAverage\nfrom quantopian.pipeline.filters import QTradableStocksUS\nimport quantopian.optimize as opt\nfrom quantopian.pipeline.factors import Returns\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\ndef make_pipeline():\n longs = Returns(window_length=2).bottom(1, mask=QTradableStocksUS())\n shorts = Returns(window_length=2).top(1, mask=QTradableStocksUS())\n return Pipeline(columns={'longs': longs, 'shorts': shorts}, screen=\n QTradableStocksUS() & (shorts | longs))\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n pipe_results = pipeline_output('my_pipeline')\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n\n\ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in 
context.portfolio.positions:\n order_target_percent(stock, 0.0)\n target_weights = compute_target_weights(context, data)\n if target_weights:\n order_optimal_portfolio(objective=opt.TargetWeights(target_weights),\n constraints=[])\n",
"step-5": "from quantopian.algorithm import order_optimal_portfolio\nfrom quantopian.algorithm import attach_pipeline, pipeline_output\nfrom quantopian.pipeline import Pipeline\nfrom quantopian.pipeline.data.builtin import USEquityPricing\nfrom quantopian.pipeline.factors import SimpleMovingAverage\nfrom quantopian.pipeline.filters import QTradableStocksUS\nimport quantopian.optimize as opt\nfrom quantopian.pipeline.factors import Returns\n\ndef initialize(context):\n # Schedule our rebalance function to run at the end of\n # each day, when the market closes\n #set_slippage(slippage.FixedSlippage(spread=0.0, volume_limit=1))\n #set_slippage(slippage.FixedBasisPointsSlippage(basis_points=0, volume_limit=100))\n #set_slippage(slippage.VolumeShareSlippage(0))\n schedule_function(\n my_rebalance,\n date_rules.every_day(),\n time_rules.market_close(minutes=1 )\n )\n\n # Create our pipeline and attach it to our algorithm.\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n\ndef make_pipeline():\n \n #longs = Returns(window_length=2).percentile_between(0,20,mask=QTradableStocksUS())\n #shorts = Returns(window_length=2).percentile_between(80,100,mask=QTradableStocksUS())\n longs = Returns(window_length=2).bottom(1,mask=QTradableStocksUS())\n shorts = Returns(window_length=2).top(1,mask=QTradableStocksUS()) \n\n return Pipeline(\n columns={\n 'longs': longs,\n 'shorts': shorts,\n },\n screen=QTradableStocksUS()& (shorts | longs)\n )\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n\n # Initialize empty target weights dictionary.\n # This will map securities to their target weight.\n weights = {}\n\n # If there are securities in our longs and shorts lists,\n # compute even target weights for each security.\n if context.longs :\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n #if ~(context.longs & context.shorts):\n # return weights\n\n # Exit 
positions in our portfolio if they are not\n # in our longs or shorts lists.\n for security in context.portfolio.positions:\n if security not in context.longs and security not in context.shorts and data.can_trade(security):\n weights[security] = 0\n\n for security in context.longs:\n weights[security] = long_weight\n\n for security in context.shorts:\n weights[security] = short_weight\n\n return weights\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n\n # Gets our pipeline output every day.\n pipe_results = pipeline_output('my_pipeline')\n\n # Go long in securities for which the 'longs' value is True,\n # and check if they can be traded.\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n #print(context.longs)\n #print('Longs: ') \n #print(context.longs)\n # Go short in securities for which the 'shorts' value is True,\n # and check if they can be traded.\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n #print('Shorts: ')\n #print(context.shorts)\n \n \n \ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in context.portfolio.positions:\n #print('selling everything')\n #print(stock)\n order_target_percent(stock, 0.0) \n # Calculate target weights to rebalance\n #print(context)\n target_weights = compute_target_weights(context, data)\n #print(target_weights)\n\n # If we have target weights, rebalance our portfolio\n if target_weights:\n order_optimal_portfolio(\n objective=opt.TargetWeights(target_weights),\n constraints=[],\n )\n",
"step-ids": [
1,
2,
4,
6,
7
]
}
|
[
1,
2,
4,
6,
7
] |
# !/usr/bin/env python
# -*- coding: utf-8 -*-
# tail -2 hightemp.txt
# Print the last N lines of a file, equivalent to `tail -n N FILE`.
# Usage: python script.py FILE N
# NOTE: Python 2 syntax (print statement); will not run under Python 3.
import sys
# argv[1]: path of the input file — the whole file is read into memory.
with open(sys.argv[1]) as f:
    lines = f.readlines();
# argv[2]: number of trailing lines to show (string; converted below).
n = sys.argv[2];
# Slice off everything before the last n lines; each kept line still
# carries its own trailing newline, so join with the empty string.
print "".join(lines[len(lines)-int(n):])
|
normal
|
{
"blob_id": "a1710ee228a432db92c9586ddff0bfcad1f434a8",
"index": 2088,
"step-1": "# !/usr/bin/env python\n# -*- coding: utf-8 -*-\n# tail -2 hightemp.txt\n\n\nimport sys\n\nwith open(sys.argv[1]) as f:\n lines = f.readlines();\n\nn = sys.argv[2];\n\nprint \"\".join(lines[len(lines)-int(n):])",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.initializers import RandomUniform
class Critic:
def __init__(self, obs_dim, action_dim, learning_rate=0.001):
self.obs_dim = obs_dim
self.action_dim = action_dim
self.model = self.make_network()
self.optimizer = keras.optimizers.Adam(learning_rate)
# self.model.compile(loss="mse", optimizer=self.optimizer)
def make_network(self):
obs_input = keras.Input(shape=(self.obs_dim,), dtype="float32", name="obs")
action_input = keras.Input(shape=(self.action_dim,), dtype="float32", name="action")
# layer 0 - with obs input
w_range = 1 / np.sqrt(self.obs_dim)
lr_0 = keras.layers.Dense(400, activation="relu", name="c_lr_0", kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)
# add
lr_0 = keras.layers.BatchNormalization()(lr_0)
# layer 1 with concatenated input of [lr_0, action]
lr_1_input = keras.layers.concatenate([lr_0, action_input])
w_range = 1 / np.sqrt(400.0)
lr_1 = keras.layers.Dense(300, activation="relu", name="c_lr_1", kernel_initializer=RandomUniform(-w_range, w_range))(lr_1_input)
# final layers with linear activation
w_range = 0.003
q_val = keras.layers.Dense(1, activation="linear", name="q_val", kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)
model = keras.Model(inputs=[obs_input, action_input], outputs=q_val)
return model
def estimate_q(self, obs, action):
obs = tf.reshape(obs, (-1, self.obs_dim))
action = tf.reshape(action, (-1, self.action_dim))
return self.model([obs, action])
class Actor:
# 输入特征数,动作特征数,奖励
def __init__(self, obs_dim, action_dim, action_gain, learning_rate=0.0001):
self.obs_dim = obs_dim
self.action_dim = action_dim
self.action_gain = action_gain
self.model = self.make_network()
self.optimizer = keras.optimizers.Adam(learning_rate)
def make_network(self):
obs_input = keras.Input(shape=(self.obs_dim,), dtype="float32", name="obs")
# layer 0 - with obs input
w_range = 1 / np.sqrt(self.obs_dim)
lr_0 = keras.layers.Dense(400, activation="relu", name="a_lr_0", kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)
# add
lr_0 = keras.layers.BatchNormalization()(lr_0)
# layer 1
w_range = 1 / np.sqrt(400.0)
lr_1 = keras.layers.Dense(300, activation="relu", name="a_lr_1", kernel_initializer=RandomUniform(-w_range, w_range))(lr_0)
# add
lr_1 = keras.layers.BatchNormalization()(lr_1)
# action layer
# tanh 函数输出在(-1, 1)之间,用action_gain缩放
w_range = 0.003
action = self.action_gain * keras.layers.Dense(self.action_dim, activation="tanh", name="action", kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)
model = keras.Model(inputs=obs_input, outputs=action)
return model
def act(self, obs):
# 将状态转换为批量的形式
obs = tf.reshape(obs, (-1, self.obs_dim))
return self.model(obs)
if __name__ == "__main__":
actor = Actor(4, 1, 2)
critic = Critic(4, 1)
obs = np.random.rand(4)
action = actor.act(obs)[0]
q_val = critic.estimate_q(obs, action)[0]
# keras.utils.plot_model(actor, 'actor.png', show_shapes=True)
# keras.utils.plot_model(critic, 'critic.png', show_shapes=True)
print("\nRandom actor-critic output for obs={}:".format(obs))
print("Action: {}, Qval: {}".format(action, q_val))
|
normal
|
{
"blob_id": "535fdee8f74b1984c5d1a5ec929310473b01239d",
"index": 1617,
"step-1": "<mask token>\n\n\nclass Critic:\n\n def __init__(self, obs_dim, action_dim, learning_rate=0.001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n action_input = keras.Input(shape=(self.action_dim,), dtype=\n 'float32', name='action')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='c_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n lr_1_input = keras.layers.concatenate([lr_0, action_input])\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', name='c_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1_input)\n w_range = 0.003\n q_val = keras.layers.Dense(1, activation='linear', name='q_val',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=[obs_input, action_input], outputs=q_val)\n return model\n <mask token>\n\n\nclass Actor:\n\n def __init__(self, obs_dim, action_dim, action_gain, learning_rate=0.0001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.action_gain = action_gain\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='a_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', name='a_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_0)\n lr_1 = keras.layers.BatchNormalization()(lr_1)\n w_range = 0.003\n action = 
self.action_gain * keras.layers.Dense(self.action_dim,\n activation='tanh', name='action', kernel_initializer=\n RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=obs_input, outputs=action)\n return model\n\n def act(self, obs):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n return self.model(obs)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Critic:\n\n def __init__(self, obs_dim, action_dim, learning_rate=0.001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n action_input = keras.Input(shape=(self.action_dim,), dtype=\n 'float32', name='action')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='c_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n lr_1_input = keras.layers.concatenate([lr_0, action_input])\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', name='c_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1_input)\n w_range = 0.003\n q_val = keras.layers.Dense(1, activation='linear', name='q_val',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=[obs_input, action_input], outputs=q_val)\n return model\n\n def estimate_q(self, obs, action):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n action = tf.reshape(action, (-1, self.action_dim))\n return self.model([obs, action])\n\n\nclass Actor:\n\n def __init__(self, obs_dim, action_dim, action_gain, learning_rate=0.0001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.action_gain = action_gain\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='a_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', 
name='a_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_0)\n lr_1 = keras.layers.BatchNormalization()(lr_1)\n w_range = 0.003\n action = self.action_gain * keras.layers.Dense(self.action_dim,\n activation='tanh', name='action', kernel_initializer=\n RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=obs_input, outputs=action)\n return model\n\n def act(self, obs):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n return self.model(obs)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Critic:\n\n def __init__(self, obs_dim, action_dim, learning_rate=0.001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n action_input = keras.Input(shape=(self.action_dim,), dtype=\n 'float32', name='action')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='c_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n lr_1_input = keras.layers.concatenate([lr_0, action_input])\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', name='c_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1_input)\n w_range = 0.003\n q_val = keras.layers.Dense(1, activation='linear', name='q_val',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=[obs_input, action_input], outputs=q_val)\n return model\n\n def estimate_q(self, obs, action):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n action = tf.reshape(action, (-1, self.action_dim))\n return self.model([obs, action])\n\n\nclass Actor:\n\n def __init__(self, obs_dim, action_dim, action_gain, learning_rate=0.0001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.action_gain = action_gain\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='a_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', 
name='a_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_0)\n lr_1 = keras.layers.BatchNormalization()(lr_1)\n w_range = 0.003\n action = self.action_gain * keras.layers.Dense(self.action_dim,\n activation='tanh', name='action', kernel_initializer=\n RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=obs_input, outputs=action)\n return model\n\n def act(self, obs):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n return self.model(obs)\n\n\nif __name__ == '__main__':\n actor = Actor(4, 1, 2)\n critic = Critic(4, 1)\n obs = np.random.rand(4)\n action = actor.act(obs)[0]\n q_val = critic.estimate_q(obs, action)[0]\n print('\\nRandom actor-critic output for obs={}:'.format(obs))\n print('Action: {}, Qval: {}'.format(action, q_val))\n",
"step-4": "import numpy as np\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras.initializers import RandomUniform\n\n\nclass Critic:\n\n def __init__(self, obs_dim, action_dim, learning_rate=0.001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n action_input = keras.Input(shape=(self.action_dim,), dtype=\n 'float32', name='action')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='c_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n lr_1_input = keras.layers.concatenate([lr_0, action_input])\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', name='c_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1_input)\n w_range = 0.003\n q_val = keras.layers.Dense(1, activation='linear', name='q_val',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=[obs_input, action_input], outputs=q_val)\n return model\n\n def estimate_q(self, obs, action):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n action = tf.reshape(action, (-1, self.action_dim))\n return self.model([obs, action])\n\n\nclass Actor:\n\n def __init__(self, obs_dim, action_dim, action_gain, learning_rate=0.0001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.action_gain = action_gain\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype='float32',\n name='obs')\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation='relu', name='a_lr_0',\n kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n lr_0 = 
keras.layers.BatchNormalization()(lr_0)\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation='relu', name='a_lr_1',\n kernel_initializer=RandomUniform(-w_range, w_range))(lr_0)\n lr_1 = keras.layers.BatchNormalization()(lr_1)\n w_range = 0.003\n action = self.action_gain * keras.layers.Dense(self.action_dim,\n activation='tanh', name='action', kernel_initializer=\n RandomUniform(-w_range, w_range))(lr_1)\n model = keras.Model(inputs=obs_input, outputs=action)\n return model\n\n def act(self, obs):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n return self.model(obs)\n\n\nif __name__ == '__main__':\n actor = Actor(4, 1, 2)\n critic = Critic(4, 1)\n obs = np.random.rand(4)\n action = actor.act(obs)[0]\n q_val = critic.estimate_q(obs, action)[0]\n print('\\nRandom actor-critic output for obs={}:'.format(obs))\n print('Action: {}, Qval: {}'.format(action, q_val))\n",
"step-5": "import numpy as np\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras.initializers import RandomUniform\n\nclass Critic:\n def __init__(self, obs_dim, action_dim, learning_rate=0.001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n # self.model.compile(loss=\"mse\", optimizer=self.optimizer)\n\n def make_network(self):\n obs_input = keras.Input(shape=(self.obs_dim,), dtype=\"float32\", name=\"obs\")\n action_input = keras.Input(shape=(self.action_dim,), dtype=\"float32\", name=\"action\")\n\n # layer 0 - with obs input\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation=\"relu\", name=\"c_lr_0\", kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n # add\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n\n # layer 1 with concatenated input of [lr_0, action]\n lr_1_input = keras.layers.concatenate([lr_0, action_input])\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation=\"relu\", name=\"c_lr_1\", kernel_initializer=RandomUniform(-w_range, w_range))(lr_1_input)\n\n # final layers with linear activation\n w_range = 0.003\n q_val = keras.layers.Dense(1, activation=\"linear\", name=\"q_val\", kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)\n\n model = keras.Model(inputs=[obs_input, action_input], outputs=q_val)\n return model\n\n def estimate_q(self, obs, action):\n obs = tf.reshape(obs, (-1, self.obs_dim))\n action = tf.reshape(action, (-1, self.action_dim))\n return self.model([obs, action])\n\n\nclass Actor:\n # 输入特征数,动作特征数,奖励\n def __init__(self, obs_dim, action_dim, action_gain, learning_rate=0.0001):\n self.obs_dim = obs_dim\n self.action_dim = action_dim\n self.action_gain = action_gain\n self.model = self.make_network()\n self.optimizer = keras.optimizers.Adam(learning_rate)\n\n def make_network(self):\n obs_input = 
keras.Input(shape=(self.obs_dim,), dtype=\"float32\", name=\"obs\")\n\n # layer 0 - with obs input\n w_range = 1 / np.sqrt(self.obs_dim)\n lr_0 = keras.layers.Dense(400, activation=\"relu\", name=\"a_lr_0\", kernel_initializer=RandomUniform(-w_range, w_range))(obs_input)\n\n # add\n lr_0 = keras.layers.BatchNormalization()(lr_0)\n\n # layer 1\n w_range = 1 / np.sqrt(400.0)\n lr_1 = keras.layers.Dense(300, activation=\"relu\", name=\"a_lr_1\", kernel_initializer=RandomUniform(-w_range, w_range))(lr_0)\n # add\n lr_1 = keras.layers.BatchNormalization()(lr_1)\n\n # action layer\n # tanh 函数输出在(-1, 1)之间,用action_gain缩放\n w_range = 0.003\n action = self.action_gain * keras.layers.Dense(self.action_dim, activation=\"tanh\", name=\"action\", kernel_initializer=RandomUniform(-w_range, w_range))(lr_1)\n\n model = keras.Model(inputs=obs_input, outputs=action)\n return model\n\n def act(self, obs):\n # 将状态转换为批量的形式\n obs = tf.reshape(obs, (-1, self.obs_dim))\n return self.model(obs)\n\n\nif __name__ == \"__main__\":\n actor = Actor(4, 1, 2)\n critic = Critic(4, 1)\n\n obs = np.random.rand(4)\n\n action = actor.act(obs)[0]\n q_val = critic.estimate_q(obs, action)[0]\n\n # keras.utils.plot_model(actor, 'actor.png', show_shapes=True)\n # keras.utils.plot_model(critic, 'critic.png', show_shapes=True)\n\n\n print(\"\\nRandom actor-critic output for obs={}:\".format(obs))\n print(\"Action: {}, Qval: {}\".format(action, q_val))\n",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
# 5. Усовершенствовать программу «Банковский депозит». Третьим аргументом в функцию должна
# передаваться фиксированная ежемесячная сумма пополнения вклада. Необходимо в главной
# функции реализовать вложенную функцию подсчета процентов для пополняемой суммы.
# Примем, что клиент вносит средства в последний день каждого месяца, кроме первого и
# последнего. Например, при сроке вклада в 6 месяцев пополнение происходит в течение 4
# месяцев. Вложенная функция возвращает сумму дополнительно внесенных средств (с
# процентами), а главная функция — общую сумму по вкладу на конец периода.
from task_1_4 import get_percent
def chargeable_deposit(amount, months, charge=0):
percent = get_percent(amount, months)
if not percent:
print('Нет подходящего тарифа')
total = amount
for month in range(months):
profit = total * percent / 100 / 12
total += profit
if month != 0 and month != months - 1:
total += charge + charge * percent / 100 / 12
print(round(total, 2))
chargeable_deposit(10000, 24, 100)
|
normal
|
{
"blob_id": "bf9e83591f737caec3060b72d86d56faec9bb23b",
"index": 8079,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef chargeable_deposit(amount, months, charge=0):\n percent = get_percent(amount, months)\n if not percent:\n print('Нет подходящего тарифа')\n total = amount\n for month in range(months):\n profit = total * percent / 100 / 12\n total += profit\n if month != 0 and month != months - 1:\n total += charge + charge * percent / 100 / 12\n print(round(total, 2))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef chargeable_deposit(amount, months, charge=0):\n percent = get_percent(amount, months)\n if not percent:\n print('Нет подходящего тарифа')\n total = amount\n for month in range(months):\n profit = total * percent / 100 / 12\n total += profit\n if month != 0 and month != months - 1:\n total += charge + charge * percent / 100 / 12\n print(round(total, 2))\n\n\nchargeable_deposit(10000, 24, 100)\n",
"step-4": "from task_1_4 import get_percent\n\n\ndef chargeable_deposit(amount, months, charge=0):\n percent = get_percent(amount, months)\n if not percent:\n print('Нет подходящего тарифа')\n total = amount\n for month in range(months):\n profit = total * percent / 100 / 12\n total += profit\n if month != 0 and month != months - 1:\n total += charge + charge * percent / 100 / 12\n print(round(total, 2))\n\n\nchargeable_deposit(10000, 24, 100)\n",
"step-5": "# 5. Усовершенствовать программу «Банковский депозит». Третьим аргументом в функцию должна\r\n# передаваться фиксированная ежемесячная сумма пополнения вклада. Необходимо в главной\r\n# функции реализовать вложенную функцию подсчета процентов для пополняемой суммы.\r\n# Примем, что клиент вносит средства в последний день каждого месяца, кроме первого и\r\n# последнего. Например, при сроке вклада в 6 месяцев пополнение происходит в течение 4\r\n# месяцев. Вложенная функция возвращает сумму дополнительно внесенных средств (с\r\n# процентами), а главная функция — общую сумму по вкладу на конец периода.\r\n\r\nfrom task_1_4 import get_percent\r\n\r\n\r\ndef chargeable_deposit(amount, months, charge=0):\r\n percent = get_percent(amount, months)\r\n if not percent:\r\n print('Нет подходящего тарифа')\r\n\r\n total = amount\r\n for month in range(months):\r\n profit = total * percent / 100 / 12\r\n total += profit\r\n if month != 0 and month != months - 1:\r\n total += charge + charge * percent / 100 / 12\r\n\r\n print(round(total, 2))\r\n\r\n\r\nchargeable_deposit(10000, 24, 100)\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class player:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class player:
def __init__(self, name: str, symbol: str):
self._name = name
self._symbol = symbol
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class player:
def __init__(self, name: str, symbol: str):
self._name = name
self._symbol = symbol
<|reserved_special_token_0|>
def get_next_move(self):
"""
Asks user for next move
:return: board position
"""
return int(input('Enter your move: '))
<|reserved_special_token_1|>
class player:
def __init__(self, name: str, symbol: str):
self._name = name
self._symbol = symbol
def decide_next_move(self):
"""
Checks all possible combinations to decide best next move
:return: board position
"""
pass
def get_next_move(self):
"""
Asks user for next move
:return: board position
"""
return int(input('Enter your move: '))
|
flexible
|
{
"blob_id": "3cc894570189fe545f5db3150d0b69c16dc211dc",
"index": 981,
"step-1": "class player:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "class player:\n\n def __init__(self, name: str, symbol: str):\n self._name = name\n self._symbol = symbol\n <mask token>\n <mask token>\n",
"step-3": "class player:\n\n def __init__(self, name: str, symbol: str):\n self._name = name\n self._symbol = symbol\n <mask token>\n\n def get_next_move(self):\n \"\"\"\n Asks user for next move\n :return: board position\n \"\"\"\n return int(input('Enter your move: '))\n",
"step-4": "class player:\n\n def __init__(self, name: str, symbol: str):\n self._name = name\n self._symbol = symbol\n\n def decide_next_move(self):\n \"\"\"\n Checks all possible combinations to decide best next move\n :return: board position\n \"\"\"\n pass\n\n def get_next_move(self):\n \"\"\"\n Asks user for next move\n :return: board position\n \"\"\"\n return int(input('Enter your move: '))\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def test_logsources_model(self):
"""
Comprobacion de que el modelo de la fuente de seguridad coincide con su asociado
Returns:
"""
log_source = LogSources.objects.get(Model='iptables v1.4.21')
self.assertEqual(log_source.get_model(), 'iptables v1.4.21')
<|reserved_special_token_1|>
def test_logsources_model(self):
"""
Comprobacion de que el modelo de la fuente de seguridad coincide con su asociado
Returns:
"""
log_source = LogSources.objects.get(Model="iptables v1.4.21")
self.assertEqual(log_source.get_model(), "iptables v1.4.21")
|
flexible
|
{
"blob_id": "c645461effe288a1959b783473d62ff99ca29547",
"index": 8746,
"step-1": "<mask token>\n",
"step-2": "def test_logsources_model(self):\n \"\"\"\n Comprobacion de que el modelo de la fuente de seguridad coincide con su asociado\n Returns:\n\n \"\"\"\n log_source = LogSources.objects.get(Model='iptables v1.4.21')\n self.assertEqual(log_source.get_model(), 'iptables v1.4.21')\n",
"step-3": "def test_logsources_model(self):\n \"\"\"\n Comprobacion de que el modelo de la fuente de seguridad coincide con su asociado\n Returns:\n\n \"\"\"\n log_source = LogSources.objects.get(Model=\"iptables v1.4.21\")\n self.assertEqual(log_source.get_model(), \"iptables v1.4.21\")\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(32), index=True)
password_hash = db.Column(db.String(128))
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
class Weather(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
pressure = db.Column(db.Float)
inTemp = db.Column(db.Float)
outTemp = db.Column(db.Float)
windDir = db.Column(db.Float)
windSpeed = db.Column(db.Float)
outHumidity = db.Column(db.Float)
inHumidity = db.Column(db.Float)
rain = db.Column(db.Float)
def save(self):
db.session.add(self)
db.session.commit()
class Webcam(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
data = db.Column(db.LargeBinary)
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(32), index=True)
password_hash = db.Column(db.String(128))
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
class Weather(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
pressure = db.Column(db.Float)
inTemp = db.Column(db.Float)
outTemp = db.Column(db.Float)
windDir = db.Column(db.Float)
windSpeed = db.Column(db.Float)
outHumidity = db.Column(db.Float)
inHumidity = db.Column(db.Float)
rain = db.Column(db.Float)
def save(self):
db.session.add(self)
db.session.commit()
class Webcam(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
data = db.Column(db.LargeBinary)
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
def setup_api_user():
username = 'weatherstation'
password = 'umevohvoori2zaew2choKaeshooPho'
if User.query.filter_by(username=username).first() is not None:
return
user = User(username=username)
user.hash_password(password)
db.session.add(user)
db.session.commit()
logger.info('User created')
def init_db(app):
db.app = app
db.init_app(app)
db.create_all()
setup_api_user()
if app.config['SQLALCHEMY_BOOTSTRAP_DATA']:
import_from_json()
return db
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = logging.getLogger(__name__)
db = SQLAlchemy()
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(32), index=True)
password_hash = db.Column(db.String(128))
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
class Weather(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
pressure = db.Column(db.Float)
inTemp = db.Column(db.Float)
outTemp = db.Column(db.Float)
windDir = db.Column(db.Float)
windSpeed = db.Column(db.Float)
outHumidity = db.Column(db.Float)
inHumidity = db.Column(db.Float)
rain = db.Column(db.Float)
def save(self):
db.session.add(self)
db.session.commit()
class Webcam(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
data = db.Column(db.LargeBinary)
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
def setup_api_user():
username = 'weatherstation'
password = 'umevohvoori2zaew2choKaeshooPho'
if User.query.filter_by(username=username).first() is not None:
return
user = User(username=username)
user.hash_password(password)
db.session.add(user)
db.session.commit()
logger.info('User created')
def init_db(app):
db.app = app
db.init_app(app)
db.create_all()
setup_api_user()
if app.config['SQLALCHEMY_BOOTSTRAP_DATA']:
import_from_json()
return db
def import_from_json():
pass
<|reserved_special_token_1|>
from flask import logging
from flask_sqlalchemy import SQLAlchemy
from passlib.apps import custom_app_context as pwd_context
logger = logging.getLogger(__name__)
db = SQLAlchemy()
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(32), index=True)
password_hash = db.Column(db.String(128))
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
class Weather(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
pressure = db.Column(db.Float)
inTemp = db.Column(db.Float)
outTemp = db.Column(db.Float)
windDir = db.Column(db.Float)
windSpeed = db.Column(db.Float)
outHumidity = db.Column(db.Float)
inHumidity = db.Column(db.Float)
rain = db.Column(db.Float)
def save(self):
db.session.add(self)
db.session.commit()
class Webcam(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
data = db.Column(db.LargeBinary)
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
def setup_api_user():
username = 'weatherstation'
password = 'umevohvoori2zaew2choKaeshooPho'
if User.query.filter_by(username=username).first() is not None:
return
user = User(username=username)
user.hash_password(password)
db.session.add(user)
db.session.commit()
logger.info('User created')
def init_db(app):
db.app = app
db.init_app(app)
db.create_all()
setup_api_user()
if app.config['SQLALCHEMY_BOOTSTRAP_DATA']:
import_from_json()
return db
def import_from_json():
pass
<|reserved_special_token_1|>
from flask import logging
from flask_sqlalchemy import SQLAlchemy
from passlib.apps import custom_app_context as pwd_context
logger = logging.getLogger(__name__)
db = SQLAlchemy() # flask-sqlalchemy
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(32), index=True)
password_hash = db.Column(db.String(128))
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
class Weather(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
pressure = db.Column(db.Float)
inTemp = db.Column(db.Float)
outTemp = db.Column(db.Float)
windDir = db.Column(db.Float)
windSpeed = db.Column(db.Float)
outHumidity = db.Column(db.Float)
inHumidity = db.Column(db.Float)
rain = db.Column(db.Float)
def save(self):
db.session.add(self)
db.session.commit()
class Webcam(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(timezone=True))
data = db.Column(db.LargeBinary)
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
def setup_api_user():
username = "weatherstation"
password = "umevohvoori2zaew2choKaeshooPho"
if User.query.filter_by(username=username).first() is not None:
return
user = User(username=username)
user.hash_password(password)
db.session.add(user)
db.session.commit()
logger.info("User created")
def init_db(app):
db.app = app
db.init_app(app)
db.create_all()
setup_api_user()
if app.config["SQLALCHEMY_BOOTSTRAP_DATA"]:
import_from_json()
return db
def import_from_json():
pass
|
flexible
|
{
"blob_id": "e976f7e423d75f7fc8a3d5cd597bdd9358ae317e",
"index": 5243,
"step-1": "<mask token>\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.String(32), index=True)\n password_hash = db.Column(db.String(128))\n\n def hash_password(self, password):\n self.password_hash = pwd_context.encrypt(password)\n\n def verify_password(self, password):\n return pwd_context.verify(password, self.password_hash)\n\n\nclass Weather(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n pressure = db.Column(db.Float)\n inTemp = db.Column(db.Float)\n outTemp = db.Column(db.Float)\n windDir = db.Column(db.Float)\n windSpeed = db.Column(db.Float)\n outHumidity = db.Column(db.Float)\n inHumidity = db.Column(db.Float)\n rain = db.Column(db.Float)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n\nclass Webcam(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n data = db.Column(db.LargeBinary)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n def delete(self):\n db.session.delete(self)\n db.session.commit()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.String(32), index=True)\n password_hash = db.Column(db.String(128))\n\n def hash_password(self, password):\n self.password_hash = pwd_context.encrypt(password)\n\n def verify_password(self, password):\n return pwd_context.verify(password, self.password_hash)\n\n\nclass Weather(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n pressure = db.Column(db.Float)\n inTemp = db.Column(db.Float)\n outTemp = db.Column(db.Float)\n windDir = db.Column(db.Float)\n windSpeed = db.Column(db.Float)\n outHumidity = db.Column(db.Float)\n inHumidity = db.Column(db.Float)\n rain = db.Column(db.Float)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n\nclass Webcam(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n data = db.Column(db.LargeBinary)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n def delete(self):\n db.session.delete(self)\n db.session.commit()\n\n\ndef setup_api_user():\n username = 'weatherstation'\n password = 'umevohvoori2zaew2choKaeshooPho'\n if User.query.filter_by(username=username).first() is not None:\n return\n user = User(username=username)\n user.hash_password(password)\n db.session.add(user)\n db.session.commit()\n logger.info('User created')\n\n\ndef init_db(app):\n db.app = app\n db.init_app(app)\n db.create_all()\n setup_api_user()\n if app.config['SQLALCHEMY_BOOTSTRAP_DATA']:\n import_from_json()\n return db\n\n\n<mask token>\n",
"step-3": "<mask token>\nlogger = logging.getLogger(__name__)\ndb = SQLAlchemy()\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.String(32), index=True)\n password_hash = db.Column(db.String(128))\n\n def hash_password(self, password):\n self.password_hash = pwd_context.encrypt(password)\n\n def verify_password(self, password):\n return pwd_context.verify(password, self.password_hash)\n\n\nclass Weather(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n pressure = db.Column(db.Float)\n inTemp = db.Column(db.Float)\n outTemp = db.Column(db.Float)\n windDir = db.Column(db.Float)\n windSpeed = db.Column(db.Float)\n outHumidity = db.Column(db.Float)\n inHumidity = db.Column(db.Float)\n rain = db.Column(db.Float)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n\nclass Webcam(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n data = db.Column(db.LargeBinary)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n def delete(self):\n db.session.delete(self)\n db.session.commit()\n\n\ndef setup_api_user():\n username = 'weatherstation'\n password = 'umevohvoori2zaew2choKaeshooPho'\n if User.query.filter_by(username=username).first() is not None:\n return\n user = User(username=username)\n user.hash_password(password)\n db.session.add(user)\n db.session.commit()\n logger.info('User created')\n\n\ndef init_db(app):\n db.app = app\n db.init_app(app)\n db.create_all()\n setup_api_user()\n if app.config['SQLALCHEMY_BOOTSTRAP_DATA']:\n import_from_json()\n return db\n\n\ndef import_from_json():\n pass\n",
"step-4": "from flask import logging\nfrom flask_sqlalchemy import SQLAlchemy\nfrom passlib.apps import custom_app_context as pwd_context\nlogger = logging.getLogger(__name__)\ndb = SQLAlchemy()\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.String(32), index=True)\n password_hash = db.Column(db.String(128))\n\n def hash_password(self, password):\n self.password_hash = pwd_context.encrypt(password)\n\n def verify_password(self, password):\n return pwd_context.verify(password, self.password_hash)\n\n\nclass Weather(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n pressure = db.Column(db.Float)\n inTemp = db.Column(db.Float)\n outTemp = db.Column(db.Float)\n windDir = db.Column(db.Float)\n windSpeed = db.Column(db.Float)\n outHumidity = db.Column(db.Float)\n inHumidity = db.Column(db.Float)\n rain = db.Column(db.Float)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n\nclass Webcam(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n data = db.Column(db.LargeBinary)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n def delete(self):\n db.session.delete(self)\n db.session.commit()\n\n\ndef setup_api_user():\n username = 'weatherstation'\n password = 'umevohvoori2zaew2choKaeshooPho'\n if User.query.filter_by(username=username).first() is not None:\n return\n user = User(username=username)\n user.hash_password(password)\n db.session.add(user)\n db.session.commit()\n logger.info('User created')\n\n\ndef init_db(app):\n db.app = app\n db.init_app(app)\n db.create_all()\n setup_api_user()\n if app.config['SQLALCHEMY_BOOTSTRAP_DATA']:\n import_from_json()\n return db\n\n\ndef import_from_json():\n pass\n",
"step-5": "from flask import logging\nfrom flask_sqlalchemy import SQLAlchemy\nfrom passlib.apps import custom_app_context as pwd_context\n\nlogger = logging.getLogger(__name__)\n\ndb = SQLAlchemy() # flask-sqlalchemy\n\n\nclass User(db.Model):\n __tablename__ = 'users'\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.String(32), index=True)\n password_hash = db.Column(db.String(128))\n\n def hash_password(self, password):\n self.password_hash = pwd_context.encrypt(password)\n\n def verify_password(self, password):\n return pwd_context.verify(password, self.password_hash)\n\n\nclass Weather(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n pressure = db.Column(db.Float)\n inTemp = db.Column(db.Float)\n outTemp = db.Column(db.Float)\n windDir = db.Column(db.Float)\n windSpeed = db.Column(db.Float)\n outHumidity = db.Column(db.Float)\n inHumidity = db.Column(db.Float)\n rain = db.Column(db.Float)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n\nclass Webcam(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n timestamp = db.Column(db.DateTime(timezone=True))\n data = db.Column(db.LargeBinary)\n\n def save(self):\n db.session.add(self)\n db.session.commit()\n\n\n def delete(self):\n db.session.delete(self)\n db.session.commit()\n\n\ndef setup_api_user():\n username = \"weatherstation\"\n password = \"umevohvoori2zaew2choKaeshooPho\"\n if User.query.filter_by(username=username).first() is not None:\n return\n user = User(username=username)\n user.hash_password(password)\n db.session.add(user)\n db.session.commit()\n logger.info(\"User created\")\n\n\ndef init_db(app):\n db.app = app\n db.init_app(app)\n db.create_all()\n setup_api_user()\n\n if app.config[\"SQLALCHEMY_BOOTSTRAP_DATA\"]:\n import_from_json()\n\n return db\n\n\ndef import_from_json():\n pass\n",
"step-ids": [
11,
13,
15,
16,
17
]
}
|
[
11,
13,
15,
16,
17
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
__version__ = '0.6.1'
<|reserved_special_token_1|>
# Global version information
__version__ = "0.6.1"
|
flexible
|
{
"blob_id": "8aeb7786984f27fabdcaffa54f52eb868c277fdb",
"index": 7707,
"step-1": "<mask token>\n",
"step-2": "__version__ = '0.6.1'\n",
"step-3": "# Global version information\n__version__ = \"0.6.1\"\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def test(config, base, loaders, brief):
compute_and_save_features(base, loaders)
results = evalutate(config, base, brief)
return results
def evalutate(config, base, brief=False):
results = {}
for mode in config.modes:
print(mode)
for number_shot in config.number_shots:
print(number_shot)
cmc, map = evaluate_sysymm01(base.save_features_path, mode,
number_shot)
results['{},{}'.format(mode, number_shot)] = [cmc, map]
if brief:
break
if brief:
break
return results
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test(config, base, loaders, brief):
compute_and_save_features(base, loaders)
results = evalutate(config, base, brief)
return results
def evalutate(config, base, brief=False):
results = {}
for mode in config.modes:
print(mode)
for number_shot in config.number_shots:
print(number_shot)
cmc, map = evaluate_sysymm01(base.save_features_path, mode,
number_shot)
results['{},{}'.format(mode, number_shot)] = [cmc, map]
if brief:
break
if brief:
break
return results
def compute_and_save_features(base, loaders):
def compute_features(images):
images_f = fliplr(images)
images = images.to(base.device)
images_f = images_f.to(base.device)
features = base.encoder(base.process_images_4_encoder(images, True,
True))
features_f = base.encoder(base.process_images_4_encoder(images_f,
True, True))
features, _, _, _ = base.embeder(features)
features_f, _, _, _ = base.embeder(features_f)
features = features + features_f
if base.part_num == 1:
features = torch.unsqueeze(features, -1)
return features
def normalize_and_resize_feature(features):
norm = torch.norm(features, dim=1, keepdim=True)
features = features / norm.repeat([1, features.size(1), 1])
features = features.view(features.size(0), -1)
return features
class XX:
def __init__(self):
self.val = {}
def update(self, key, value):
if key not in self.val.keys():
self.val[key] = value
else:
self.val[key] = np.concatenate([self.val[key], value], axis=0)
def get_val(self, key):
if key in self.val.keys():
return self.val[key]
else:
return np.array([[]])
print('Time:{}. Start to compute features'.format(time_now()))
base.set_eval()
features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()
with torch.no_grad():
for i, data in enumerate(loaders.rgb_all_loader):
images, pids, cids, _ = data
images = base.G_rgb2ir(images.to(base.device)).data.cpu()
features = compute_features(images)
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
for i, data in enumerate(loaders.ir_all_loader):
images, pids, cids, _ = data
features = compute_features(images)
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
print('Time:{}. Start to normalize features.'.format(time_now()))
features = features_meter.get_val()
features = normalize_and_resize_feature(features)
features = features.data.cpu().numpy()
pids = pids_meter.get_val_numpy()
cids = cids_meter.get_val_numpy()
print('Time: {}. Note: Start to save features as .mat file'.format(
time_now()))
results = {(1): XX(), (2): XX(), (3): XX(), (4): XX(), (5): XX(), (6): XX()
}
for i in range(features.shape[0]):
feature = features[i, :]
feature = np.resize(feature, [1, feature.shape[0]])
cid, pid = cids[i], pids[i]
results[cid].update(pid, feature)
pid_num_of_cids = [333, 333, 533, 533, 533, 333]
cids = [1, 2, 3, 4, 5, 6]
for cid in cids:
a_result = results[cid]
xx = []
for pid in range(1, 1 + pid_num_of_cids[cid - 1]):
xx.append([a_result.get_val(pid).astype(np.double)])
xx = np.array(xx)
sio.savemat(os.path.join(base.save_features_path,
'feature_cam{}.mat'.format(cid)), {'feature': xx})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test(config, base, loaders, brief):
compute_and_save_features(base, loaders)
results = evalutate(config, base, brief)
return results
def evalutate(config, base, brief=False):
results = {}
for mode in config.modes:
print(mode)
for number_shot in config.number_shots:
print(number_shot)
cmc, map = evaluate_sysymm01(base.save_features_path, mode,
number_shot)
results['{},{}'.format(mode, number_shot)] = [cmc, map]
if brief:
break
if brief:
break
return results
def compute_and_save_features(base, loaders):
def compute_features(images):
images_f = fliplr(images)
images = images.to(base.device)
images_f = images_f.to(base.device)
features = base.encoder(base.process_images_4_encoder(images, True,
True))
features_f = base.encoder(base.process_images_4_encoder(images_f,
True, True))
features, _, _, _ = base.embeder(features)
features_f, _, _, _ = base.embeder(features_f)
features = features + features_f
if base.part_num == 1:
features = torch.unsqueeze(features, -1)
return features
def normalize_and_resize_feature(features):
norm = torch.norm(features, dim=1, keepdim=True)
features = features / norm.repeat([1, features.size(1), 1])
features = features.view(features.size(0), -1)
return features
class XX:
def __init__(self):
self.val = {}
def update(self, key, value):
if key not in self.val.keys():
self.val[key] = value
else:
self.val[key] = np.concatenate([self.val[key], value], axis=0)
def get_val(self, key):
if key in self.val.keys():
return self.val[key]
else:
return np.array([[]])
print('Time:{}. Start to compute features'.format(time_now()))
base.set_eval()
features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()
with torch.no_grad():
for i, data in enumerate(loaders.rgb_all_loader):
images, pids, cids, _ = data
images = base.G_rgb2ir(images.to(base.device)).data.cpu()
features = compute_features(images)
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
for i, data in enumerate(loaders.ir_all_loader):
images, pids, cids, _ = data
features = compute_features(images)
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
print('Time:{}. Start to normalize features.'.format(time_now()))
features = features_meter.get_val()
features = normalize_and_resize_feature(features)
features = features.data.cpu().numpy()
pids = pids_meter.get_val_numpy()
cids = cids_meter.get_val_numpy()
print('Time: {}. Note: Start to save features as .mat file'.format(
time_now()))
results = {(1): XX(), (2): XX(), (3): XX(), (4): XX(), (5): XX(), (6): XX()
}
for i in range(features.shape[0]):
feature = features[i, :]
feature = np.resize(feature, [1, feature.shape[0]])
cid, pid = cids[i], pids[i]
results[cid].update(pid, feature)
pid_num_of_cids = [333, 333, 533, 533, 533, 333]
cids = [1, 2, 3, 4, 5, 6]
for cid in cids:
a_result = results[cid]
xx = []
for pid in range(1, 1 + pid_num_of_cids[cid - 1]):
xx.append([a_result.get_val(pid).astype(np.double)])
xx = np.array(xx)
sio.savemat(os.path.join(base.save_features_path,
'feature_cam{}.mat'.format(cid)), {'feature': xx})
def save_images(base, current_step):
with torch.no_grad():
fixed_fake_ir_images = base.G_rgb2ir(base.fixed_real_rgb_images
).detach()
xxxx = torch.cat([base.fixed_real_rgb_images, fixed_fake_ir_images,
base.fixed_real_ir_images], dim=0)
save_image((xxxx.data.cpu() + 1.0) / 2.0, os.path.join(base.
save_images_path, 'image_{}.jpg'.format(current_step)), nrow=
base.fixed_real_rgb_images.size(0), padding=0)
<|reserved_special_token_1|>
import numpy as np
import scipy.io as sio
import os
import torch
from torchvision.utils import save_image
from tools import *
def test(config, base, loaders, brief):
compute_and_save_features(base, loaders)
results = evalutate(config, base, brief)
return results
def evalutate(config, base, brief=False):
results = {}
for mode in config.modes:
print(mode)
for number_shot in config.number_shots:
print(number_shot)
cmc, map = evaluate_sysymm01(base.save_features_path, mode,
number_shot)
results['{},{}'.format(mode, number_shot)] = [cmc, map]
if brief:
break
if brief:
break
return results
def compute_and_save_features(base, loaders):
def compute_features(images):
images_f = fliplr(images)
images = images.to(base.device)
images_f = images_f.to(base.device)
features = base.encoder(base.process_images_4_encoder(images, True,
True))
features_f = base.encoder(base.process_images_4_encoder(images_f,
True, True))
features, _, _, _ = base.embeder(features)
features_f, _, _, _ = base.embeder(features_f)
features = features + features_f
if base.part_num == 1:
features = torch.unsqueeze(features, -1)
return features
def normalize_and_resize_feature(features):
norm = torch.norm(features, dim=1, keepdim=True)
features = features / norm.repeat([1, features.size(1), 1])
features = features.view(features.size(0), -1)
return features
class XX:
def __init__(self):
self.val = {}
def update(self, key, value):
if key not in self.val.keys():
self.val[key] = value
else:
self.val[key] = np.concatenate([self.val[key], value], axis=0)
def get_val(self, key):
if key in self.val.keys():
return self.val[key]
else:
return np.array([[]])
print('Time:{}. Start to compute features'.format(time_now()))
base.set_eval()
features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()
with torch.no_grad():
for i, data in enumerate(loaders.rgb_all_loader):
images, pids, cids, _ = data
images = base.G_rgb2ir(images.to(base.device)).data.cpu()
features = compute_features(images)
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
for i, data in enumerate(loaders.ir_all_loader):
images, pids, cids, _ = data
features = compute_features(images)
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
print('Time:{}. Start to normalize features.'.format(time_now()))
features = features_meter.get_val()
features = normalize_and_resize_feature(features)
features = features.data.cpu().numpy()
pids = pids_meter.get_val_numpy()
cids = cids_meter.get_val_numpy()
print('Time: {}. Note: Start to save features as .mat file'.format(
time_now()))
results = {(1): XX(), (2): XX(), (3): XX(), (4): XX(), (5): XX(), (6): XX()
}
for i in range(features.shape[0]):
feature = features[i, :]
feature = np.resize(feature, [1, feature.shape[0]])
cid, pid = cids[i], pids[i]
results[cid].update(pid, feature)
pid_num_of_cids = [333, 333, 533, 533, 533, 333]
cids = [1, 2, 3, 4, 5, 6]
for cid in cids:
a_result = results[cid]
xx = []
for pid in range(1, 1 + pid_num_of_cids[cid - 1]):
xx.append([a_result.get_val(pid).astype(np.double)])
xx = np.array(xx)
sio.savemat(os.path.join(base.save_features_path,
'feature_cam{}.mat'.format(cid)), {'feature': xx})
def save_images(base, current_step):
with torch.no_grad():
fixed_fake_ir_images = base.G_rgb2ir(base.fixed_real_rgb_images
).detach()
xxxx = torch.cat([base.fixed_real_rgb_images, fixed_fake_ir_images,
base.fixed_real_ir_images], dim=0)
save_image((xxxx.data.cpu() + 1.0) / 2.0, os.path.join(base.
save_images_path, 'image_{}.jpg'.format(current_step)), nrow=
base.fixed_real_rgb_images.size(0), padding=0)
<|reserved_special_token_1|>
import numpy as np
import scipy.io as sio
import os
import torch
from torchvision.utils import save_image
from tools import *
def test(config, base, loaders, brief):
compute_and_save_features(base, loaders)
results = evalutate(config, base, brief)
return results
def evalutate(config, base, brief=False):
results = {}
for mode in config.modes:
print(mode)
for number_shot in config.number_shots:
print(number_shot)
cmc, map = evaluate_sysymm01(base.save_features_path, mode, number_shot)
results['{},{}'.format(mode, number_shot)] = [cmc, map]
if brief: break
if brief: break
return results
def compute_and_save_features(base, loaders):
def compute_features(images):
images_f = fliplr(images)
images = images.to(base.device)
images_f = images_f.to(base.device)
features = base.encoder(base.process_images_4_encoder(images, True, True))
features_f = base.encoder(base.process_images_4_encoder(images_f, True, True))
features, _, _, _ = base.embeder(features)
features_f, _, _, _ = base.embeder(features_f)
features = features + features_f
if base.part_num == 1:
features = torch.unsqueeze(features, -1)
return features
def normalize_and_resize_feature(features):
# normlize
norm = torch.norm(features, dim=1, keepdim=True)
features = features / norm.repeat([1, features.size(1), 1])
# resize
features = features.view(features.size(0), -1)
return features
class XX:
def __init__(self):
self.val = {}
def update(self, key, value):
if key not in self.val.keys():
self.val[key] = value
else:
self.val[key] = np.concatenate([self.val[key], value], axis=0)
def get_val(self, key):
if key in self.val.keys():
return self.val[key]
else:
return np.array([[]])
print('Time:{}. Start to compute features'.format(time_now()))
# compute features
# base._resume_model(test_step)
base.set_eval()
features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()
with torch.no_grad():
for i, data in enumerate(loaders.rgb_all_loader):
# load data
images, pids, cids, _ = data
images = base.G_rgb2ir(images.to(base.device)).data.cpu()
# forward
features = compute_features(images)
# meter
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
for i, data in enumerate(loaders.ir_all_loader):
# load data
images, pids, cids, _ = data
# forward
features = compute_features(images)
# meter
features_meter.update(features.data)
pids_meter.update(pids.data)
cids_meter.update(cids.data)
print('Time:{}. Start to normalize features.'.format(time_now()))
# normalize features
features = features_meter.get_val()
features = normalize_and_resize_feature(features)
features = features.data.cpu().numpy()
pids = pids_meter.get_val_numpy()
cids = cids_meter.get_val_numpy()
print('Time: {}. Note: Start to save features as .mat file'.format(time_now()))
# save features as .mat file
results = {1: XX(), 2: XX(), 3: XX(), 4: XX(), 5: XX(), 6: XX()}
for i in range(features.shape[0]):
feature = features[i, :]
feature = np.resize(feature, [1, feature.shape[0]])
cid, pid = cids[i], pids[i]
results[cid].update(pid, feature)
pid_num_of_cids = [333, 333, 533, 533, 533, 333]
cids = [1, 2, 3, 4, 5, 6]
for cid in cids:
a_result = results[cid]
xx = []
for pid in range(1, 1+ pid_num_of_cids[cid - 1]):
xx.append([a_result.get_val(pid).astype(np.double)])
xx = np.array(xx)
sio.savemat(os.path.join(base.save_features_path, 'feature_cam{}.mat'.format(cid)), {'feature': xx})
def save_images(base, current_step):
#base.set_eval()
with torch.no_grad():
fixed_fake_ir_images = base.G_rgb2ir(base.fixed_real_rgb_images).detach()
xxxx = torch.cat([base.fixed_real_rgb_images, fixed_fake_ir_images, base.fixed_real_ir_images], dim=0)
save_image((xxxx.data.cpu() + 1.0) / 2.0,
os.path.join(base.save_images_path, 'image_{}.jpg'.format(current_step)), nrow=base.fixed_real_rgb_images.size(0), padding=0)
|
flexible
|
{
"blob_id": "b21796a9e10314f80cac3151d1fdbb139966303f",
"index": 5555,
"step-1": "<mask token>\n\n\ndef test(config, base, loaders, brief):\n compute_and_save_features(base, loaders)\n results = evalutate(config, base, brief)\n return results\n\n\ndef evalutate(config, base, brief=False):\n results = {}\n for mode in config.modes:\n print(mode)\n for number_shot in config.number_shots:\n print(number_shot)\n cmc, map = evaluate_sysymm01(base.save_features_path, mode,\n number_shot)\n results['{},{}'.format(mode, number_shot)] = [cmc, map]\n if brief:\n break\n if brief:\n break\n return results\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef test(config, base, loaders, brief):\n compute_and_save_features(base, loaders)\n results = evalutate(config, base, brief)\n return results\n\n\ndef evalutate(config, base, brief=False):\n results = {}\n for mode in config.modes:\n print(mode)\n for number_shot in config.number_shots:\n print(number_shot)\n cmc, map = evaluate_sysymm01(base.save_features_path, mode,\n number_shot)\n results['{},{}'.format(mode, number_shot)] = [cmc, map]\n if brief:\n break\n if brief:\n break\n return results\n\n\ndef compute_and_save_features(base, loaders):\n\n def compute_features(images):\n images_f = fliplr(images)\n images = images.to(base.device)\n images_f = images_f.to(base.device)\n features = base.encoder(base.process_images_4_encoder(images, True,\n True))\n features_f = base.encoder(base.process_images_4_encoder(images_f, \n True, True))\n features, _, _, _ = base.embeder(features)\n features_f, _, _, _ = base.embeder(features_f)\n features = features + features_f\n if base.part_num == 1:\n features = torch.unsqueeze(features, -1)\n return features\n\n def normalize_and_resize_feature(features):\n norm = torch.norm(features, dim=1, keepdim=True)\n features = features / norm.repeat([1, features.size(1), 1])\n features = features.view(features.size(0), -1)\n return features\n\n\n class XX:\n\n def __init__(self):\n self.val = {}\n\n def update(self, key, value):\n if key not in self.val.keys():\n self.val[key] = value\n else:\n self.val[key] = np.concatenate([self.val[key], value], axis=0)\n\n def get_val(self, key):\n if key in self.val.keys():\n return self.val[key]\n else:\n return np.array([[]])\n print('Time:{}. 
Start to compute features'.format(time_now()))\n base.set_eval()\n features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()\n with torch.no_grad():\n for i, data in enumerate(loaders.rgb_all_loader):\n images, pids, cids, _ = data\n images = base.G_rgb2ir(images.to(base.device)).data.cpu()\n features = compute_features(images)\n features_meter.update(features.data)\n pids_meter.update(pids.data)\n cids_meter.update(cids.data)\n for i, data in enumerate(loaders.ir_all_loader):\n images, pids, cids, _ = data\n features = compute_features(images)\n features_meter.update(features.data)\n pids_meter.update(pids.data)\n cids_meter.update(cids.data)\n print('Time:{}. Start to normalize features.'.format(time_now()))\n features = features_meter.get_val()\n features = normalize_and_resize_feature(features)\n features = features.data.cpu().numpy()\n pids = pids_meter.get_val_numpy()\n cids = cids_meter.get_val_numpy()\n print('Time: {}. Note: Start to save features as .mat file'.format(\n time_now()))\n results = {(1): XX(), (2): XX(), (3): XX(), (4): XX(), (5): XX(), (6): XX()\n }\n for i in range(features.shape[0]):\n feature = features[i, :]\n feature = np.resize(feature, [1, feature.shape[0]])\n cid, pid = cids[i], pids[i]\n results[cid].update(pid, feature)\n pid_num_of_cids = [333, 333, 533, 533, 533, 333]\n cids = [1, 2, 3, 4, 5, 6]\n for cid in cids:\n a_result = results[cid]\n xx = []\n for pid in range(1, 1 + pid_num_of_cids[cid - 1]):\n xx.append([a_result.get_val(pid).astype(np.double)])\n xx = np.array(xx)\n sio.savemat(os.path.join(base.save_features_path,\n 'feature_cam{}.mat'.format(cid)), {'feature': xx})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef test(config, base, loaders, brief):\n compute_and_save_features(base, loaders)\n results = evalutate(config, base, brief)\n return results\n\n\ndef evalutate(config, base, brief=False):\n results = {}\n for mode in config.modes:\n print(mode)\n for number_shot in config.number_shots:\n print(number_shot)\n cmc, map = evaluate_sysymm01(base.save_features_path, mode,\n number_shot)\n results['{},{}'.format(mode, number_shot)] = [cmc, map]\n if brief:\n break\n if brief:\n break\n return results\n\n\ndef compute_and_save_features(base, loaders):\n\n def compute_features(images):\n images_f = fliplr(images)\n images = images.to(base.device)\n images_f = images_f.to(base.device)\n features = base.encoder(base.process_images_4_encoder(images, True,\n True))\n features_f = base.encoder(base.process_images_4_encoder(images_f, \n True, True))\n features, _, _, _ = base.embeder(features)\n features_f, _, _, _ = base.embeder(features_f)\n features = features + features_f\n if base.part_num == 1:\n features = torch.unsqueeze(features, -1)\n return features\n\n def normalize_and_resize_feature(features):\n norm = torch.norm(features, dim=1, keepdim=True)\n features = features / norm.repeat([1, features.size(1), 1])\n features = features.view(features.size(0), -1)\n return features\n\n\n class XX:\n\n def __init__(self):\n self.val = {}\n\n def update(self, key, value):\n if key not in self.val.keys():\n self.val[key] = value\n else:\n self.val[key] = np.concatenate([self.val[key], value], axis=0)\n\n def get_val(self, key):\n if key in self.val.keys():\n return self.val[key]\n else:\n return np.array([[]])\n print('Time:{}. 
Start to compute features'.format(time_now()))\n base.set_eval()\n features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()\n with torch.no_grad():\n for i, data in enumerate(loaders.rgb_all_loader):\n images, pids, cids, _ = data\n images = base.G_rgb2ir(images.to(base.device)).data.cpu()\n features = compute_features(images)\n features_meter.update(features.data)\n pids_meter.update(pids.data)\n cids_meter.update(cids.data)\n for i, data in enumerate(loaders.ir_all_loader):\n images, pids, cids, _ = data\n features = compute_features(images)\n features_meter.update(features.data)\n pids_meter.update(pids.data)\n cids_meter.update(cids.data)\n print('Time:{}. Start to normalize features.'.format(time_now()))\n features = features_meter.get_val()\n features = normalize_and_resize_feature(features)\n features = features.data.cpu().numpy()\n pids = pids_meter.get_val_numpy()\n cids = cids_meter.get_val_numpy()\n print('Time: {}. Note: Start to save features as .mat file'.format(\n time_now()))\n results = {(1): XX(), (2): XX(), (3): XX(), (4): XX(), (5): XX(), (6): XX()\n }\n for i in range(features.shape[0]):\n feature = features[i, :]\n feature = np.resize(feature, [1, feature.shape[0]])\n cid, pid = cids[i], pids[i]\n results[cid].update(pid, feature)\n pid_num_of_cids = [333, 333, 533, 533, 533, 333]\n cids = [1, 2, 3, 4, 5, 6]\n for cid in cids:\n a_result = results[cid]\n xx = []\n for pid in range(1, 1 + pid_num_of_cids[cid - 1]):\n xx.append([a_result.get_val(pid).astype(np.double)])\n xx = np.array(xx)\n sio.savemat(os.path.join(base.save_features_path,\n 'feature_cam{}.mat'.format(cid)), {'feature': xx})\n\n\ndef save_images(base, current_step):\n with torch.no_grad():\n fixed_fake_ir_images = base.G_rgb2ir(base.fixed_real_rgb_images\n ).detach()\n xxxx = torch.cat([base.fixed_real_rgb_images, fixed_fake_ir_images,\n base.fixed_real_ir_images], dim=0)\n save_image((xxxx.data.cpu() + 1.0) / 2.0, os.path.join(base.\n save_images_path, 
'image_{}.jpg'.format(current_step)), nrow=\n base.fixed_real_rgb_images.size(0), padding=0)\n",
"step-4": "import numpy as np\nimport scipy.io as sio\nimport os\nimport torch\nfrom torchvision.utils import save_image\nfrom tools import *\n\n\ndef test(config, base, loaders, brief):\n compute_and_save_features(base, loaders)\n results = evalutate(config, base, brief)\n return results\n\n\ndef evalutate(config, base, brief=False):\n results = {}\n for mode in config.modes:\n print(mode)\n for number_shot in config.number_shots:\n print(number_shot)\n cmc, map = evaluate_sysymm01(base.save_features_path, mode,\n number_shot)\n results['{},{}'.format(mode, number_shot)] = [cmc, map]\n if brief:\n break\n if brief:\n break\n return results\n\n\ndef compute_and_save_features(base, loaders):\n\n def compute_features(images):\n images_f = fliplr(images)\n images = images.to(base.device)\n images_f = images_f.to(base.device)\n features = base.encoder(base.process_images_4_encoder(images, True,\n True))\n features_f = base.encoder(base.process_images_4_encoder(images_f, \n True, True))\n features, _, _, _ = base.embeder(features)\n features_f, _, _, _ = base.embeder(features_f)\n features = features + features_f\n if base.part_num == 1:\n features = torch.unsqueeze(features, -1)\n return features\n\n def normalize_and_resize_feature(features):\n norm = torch.norm(features, dim=1, keepdim=True)\n features = features / norm.repeat([1, features.size(1), 1])\n features = features.view(features.size(0), -1)\n return features\n\n\n class XX:\n\n def __init__(self):\n self.val = {}\n\n def update(self, key, value):\n if key not in self.val.keys():\n self.val[key] = value\n else:\n self.val[key] = np.concatenate([self.val[key], value], axis=0)\n\n def get_val(self, key):\n if key in self.val.keys():\n return self.val[key]\n else:\n return np.array([[]])\n print('Time:{}. 
Start to compute features'.format(time_now()))\n base.set_eval()\n features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()\n with torch.no_grad():\n for i, data in enumerate(loaders.rgb_all_loader):\n images, pids, cids, _ = data\n images = base.G_rgb2ir(images.to(base.device)).data.cpu()\n features = compute_features(images)\n features_meter.update(features.data)\n pids_meter.update(pids.data)\n cids_meter.update(cids.data)\n for i, data in enumerate(loaders.ir_all_loader):\n images, pids, cids, _ = data\n features = compute_features(images)\n features_meter.update(features.data)\n pids_meter.update(pids.data)\n cids_meter.update(cids.data)\n print('Time:{}. Start to normalize features.'.format(time_now()))\n features = features_meter.get_val()\n features = normalize_and_resize_feature(features)\n features = features.data.cpu().numpy()\n pids = pids_meter.get_val_numpy()\n cids = cids_meter.get_val_numpy()\n print('Time: {}. Note: Start to save features as .mat file'.format(\n time_now()))\n results = {(1): XX(), (2): XX(), (3): XX(), (4): XX(), (5): XX(), (6): XX()\n }\n for i in range(features.shape[0]):\n feature = features[i, :]\n feature = np.resize(feature, [1, feature.shape[0]])\n cid, pid = cids[i], pids[i]\n results[cid].update(pid, feature)\n pid_num_of_cids = [333, 333, 533, 533, 533, 333]\n cids = [1, 2, 3, 4, 5, 6]\n for cid in cids:\n a_result = results[cid]\n xx = []\n for pid in range(1, 1 + pid_num_of_cids[cid - 1]):\n xx.append([a_result.get_val(pid).astype(np.double)])\n xx = np.array(xx)\n sio.savemat(os.path.join(base.save_features_path,\n 'feature_cam{}.mat'.format(cid)), {'feature': xx})\n\n\ndef save_images(base, current_step):\n with torch.no_grad():\n fixed_fake_ir_images = base.G_rgb2ir(base.fixed_real_rgb_images\n ).detach()\n xxxx = torch.cat([base.fixed_real_rgb_images, fixed_fake_ir_images,\n base.fixed_real_ir_images], dim=0)\n save_image((xxxx.data.cpu() + 1.0) / 2.0, os.path.join(base.\n save_images_path, 
'image_{}.jpg'.format(current_step)), nrow=\n base.fixed_real_rgb_images.size(0), padding=0)\n",
"step-5": "import numpy as np\nimport scipy.io as sio\nimport os\n\nimport torch\nfrom torchvision.utils import save_image\n\nfrom tools import *\n\n\n\ndef test(config, base, loaders, brief):\n\n\tcompute_and_save_features(base, loaders)\n\tresults = evalutate(config, base, brief)\n\treturn results\n\n\ndef evalutate(config, base, brief=False):\n\n\tresults = {}\n\tfor mode in config.modes:\n\t\tprint(mode)\n\t\tfor number_shot in config.number_shots:\n\t\t\tprint(number_shot)\n\t\t\tcmc, map = evaluate_sysymm01(base.save_features_path, mode, number_shot)\n\t\t\tresults['{},{}'.format(mode, number_shot)] = [cmc, map]\n\t\t\tif brief: break\n\t\tif brief: break\n\n\treturn results\n\n\ndef compute_and_save_features(base, loaders):\n\n\tdef compute_features(images):\n\t\timages_f = fliplr(images)\n\t\timages = images.to(base.device)\n\t\timages_f = images_f.to(base.device)\n\t\tfeatures = base.encoder(base.process_images_4_encoder(images, True, True))\n\t\tfeatures_f = base.encoder(base.process_images_4_encoder(images_f, True, True))\n\t\tfeatures, _, _, _ = base.embeder(features)\n\t\tfeatures_f, _, _, _ = base.embeder(features_f)\n\t\tfeatures = features + features_f\n\t\tif base.part_num == 1:\n\t\t\tfeatures = torch.unsqueeze(features, -1)\n\t\treturn features\n\n\tdef normalize_and_resize_feature(features):\n\t\t# normlize\n\t\tnorm = torch.norm(features, dim=1, keepdim=True)\n\t\tfeatures = features / norm.repeat([1, features.size(1), 1])\n\t\t# resize\n\t\tfeatures = features.view(features.size(0), -1)\n\t\treturn features\n\n\tclass XX:\n\t\tdef __init__(self):\n\t\t\tself.val = {}\n\t\tdef update(self, key, value):\n\t\t\tif key not in self.val.keys():\n\t\t\t\tself.val[key] = value\n\t\t\telse:\n\t\t\t\tself.val[key] = np.concatenate([self.val[key], value], axis=0)\n\t\tdef get_val(self, key):\n\t\t\tif key in self.val.keys():\n\t\t\t\treturn self.val[key]\n\t\t\telse:\n\t\t\t\treturn np.array([[]])\n\n\n\tprint('Time:{}. 
Start to compute features'.format(time_now()))\n\t# compute features\n\t# base._resume_model(test_step)\n\tbase.set_eval()\n\tfeatures_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()\n\n\twith torch.no_grad():\n\t\tfor i, data in enumerate(loaders.rgb_all_loader):\n\t\t\t# load data\n\t\t\timages, pids, cids, _ = data\n\t\t\timages = base.G_rgb2ir(images.to(base.device)).data.cpu()\n\t\t\t# forward\n\t\t\tfeatures = compute_features(images)\n\t\t\t# meter\n\t\t\tfeatures_meter.update(features.data)\n\t\t\tpids_meter.update(pids.data)\n\t\t\tcids_meter.update(cids.data)\n\n\t\tfor i, data in enumerate(loaders.ir_all_loader):\n\t\t\t# load data\n\t\t\timages, pids, cids, _ = data\n\t\t\t# forward\n\t\t\tfeatures = compute_features(images)\n\t\t\t# meter\n\t\t\tfeatures_meter.update(features.data)\n\t\t\tpids_meter.update(pids.data)\n\t\t\tcids_meter.update(cids.data)\n\n\tprint('Time:{}. Start to normalize features.'.format(time_now()))\n\t# normalize features\n\tfeatures = features_meter.get_val()\n\tfeatures = normalize_and_resize_feature(features)\n\tfeatures = features.data.cpu().numpy()\n\tpids = pids_meter.get_val_numpy()\n\tcids = cids_meter.get_val_numpy()\n\n\tprint('Time: {}. 
Note: Start to save features as .mat file'.format(time_now()))\n\t# save features as .mat file\n\tresults = {1: XX(), 2: XX(), 3: XX(), 4: XX(), 5: XX(), 6: XX()}\n\tfor i in range(features.shape[0]):\n\t\tfeature = features[i, :]\n\t\tfeature = np.resize(feature, [1, feature.shape[0]])\n\t\tcid, pid = cids[i], pids[i]\n\t\tresults[cid].update(pid, feature)\n\n\tpid_num_of_cids = [333, 333, 533, 533, 533, 333]\n\tcids = [1, 2, 3, 4, 5, 6]\n\tfor cid in cids:\n\t\ta_result = results[cid]\n\t\txx = []\n\t\tfor pid in range(1, 1+ pid_num_of_cids[cid - 1]):\n\t\t\txx.append([a_result.get_val(pid).astype(np.double)])\n\t\txx = np.array(xx)\n\t\tsio.savemat(os.path.join(base.save_features_path, 'feature_cam{}.mat'.format(cid)), {'feature': xx})\n\n\n\ndef save_images(base, current_step):\n\n\t#base.set_eval()\n\twith torch.no_grad():\n\t\tfixed_fake_ir_images = base.G_rgb2ir(base.fixed_real_rgb_images).detach()\n\t\txxxx = torch.cat([base.fixed_real_rgb_images, fixed_fake_ir_images, base.fixed_real_ir_images], dim=0)\n\t\tsave_image((xxxx.data.cpu() + 1.0) / 2.0,\n\t\t os.path.join(base.save_images_path, 'image_{}.jpg'.format(current_step)), nrow=base.fixed_real_rgb_images.size(0), padding=0)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# LCP 74. 最强祝福力场-离散化+二维差分
# https://leetcode.cn/problems/xepqZ5/
# forceField[i] = [x,y,side] 表示第 i 片力场将覆盖以坐标 (x,y) 为中心,边长为 side 的正方形区域。
# !若任意一点的 力场强度 等于覆盖该点的力场数量,请求出在这片地带中 力场强度 最强处的 力场强度。
# !统计所有左下和右上坐标,由于会出现 0.5可以将坐标乘 2。
# O(n^2)
from typing import List
from 二维差分模板 import DiffMatrix
class Solution:
def fieldOfGreatestBlessing(self, forceField: List[List[int]]) -> int:
# 离散化
allX, allY = set(), set()
for x, y, side in forceField:
allX.add(2 * x - side)
allX.add(2 * x + side)
allY.add(2 * y - side)
allY.add(2 * y + side)
sortedX = sorted(allX)
sortedY = sorted(allY)
rankX = {x: i for i, x in enumerate(sortedX)}
rankY = {y: i for i, y in enumerate(sortedY)}
# 二维差分
row, col = len(sortedX), len(sortedY)
diffMatrix = DiffMatrix([[0] * col for _ in range(row)])
for x, y, side in forceField:
r1, c1 = rankX[2 * x - side], rankY[2 * y - side]
r2, c2 = rankX[2 * x + side], rankY[2 * y + side]
diffMatrix.add(r1, c1, r2, c2, 1)
diffMatrix.update()
res = 0
for i in range(row):
for j in range(col):
res = max(res, diffMatrix.query(i, j))
return res
|
normal
|
{
"blob_id": "0212382b5c8cc1e98142a784fd26efd577ebceaf",
"index": 1656,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def fieldOfGreatestBlessing(self, forceField: List[List[int]]) ->int:\n allX, allY = set(), set()\n for x, y, side in forceField:\n allX.add(2 * x - side)\n allX.add(2 * x + side)\n allY.add(2 * y - side)\n allY.add(2 * y + side)\n sortedX = sorted(allX)\n sortedY = sorted(allY)\n rankX = {x: i for i, x in enumerate(sortedX)}\n rankY = {y: i for i, y in enumerate(sortedY)}\n row, col = len(sortedX), len(sortedY)\n diffMatrix = DiffMatrix([([0] * col) for _ in range(row)])\n for x, y, side in forceField:\n r1, c1 = rankX[2 * x - side], rankY[2 * y - side]\n r2, c2 = rankX[2 * x + side], rankY[2 * y + side]\n diffMatrix.add(r1, c1, r2, c2, 1)\n diffMatrix.update()\n res = 0\n for i in range(row):\n for j in range(col):\n res = max(res, diffMatrix.query(i, j))\n return res\n",
"step-4": "from typing import List\nfrom 二维差分模板 import DiffMatrix\n\n\nclass Solution:\n\n def fieldOfGreatestBlessing(self, forceField: List[List[int]]) ->int:\n allX, allY = set(), set()\n for x, y, side in forceField:\n allX.add(2 * x - side)\n allX.add(2 * x + side)\n allY.add(2 * y - side)\n allY.add(2 * y + side)\n sortedX = sorted(allX)\n sortedY = sorted(allY)\n rankX = {x: i for i, x in enumerate(sortedX)}\n rankY = {y: i for i, y in enumerate(sortedY)}\n row, col = len(sortedX), len(sortedY)\n diffMatrix = DiffMatrix([([0] * col) for _ in range(row)])\n for x, y, side in forceField:\n r1, c1 = rankX[2 * x - side], rankY[2 * y - side]\n r2, c2 = rankX[2 * x + side], rankY[2 * y + side]\n diffMatrix.add(r1, c1, r2, c2, 1)\n diffMatrix.update()\n res = 0\n for i in range(row):\n for j in range(col):\n res = max(res, diffMatrix.query(i, j))\n return res\n",
"step-5": "# LCP 74. 最强祝福力场-离散化+二维差分\r\n# https://leetcode.cn/problems/xepqZ5/\r\n# forceField[i] = [x,y,side] 表示第 i 片力场将覆盖以坐标 (x,y) 为中心,边长为 side 的正方形区域。\r\n# !若任意一点的 力场强度 等于覆盖该点的力场数量,请求出在这片地带中 力场强度 最强处的 力场强度。\r\n\r\n# !统计所有左下和右上坐标,由于会出现 0.5可以将坐标乘 2。\r\n# O(n^2)\r\n\r\n\r\nfrom typing import List\r\nfrom 二维差分模板 import DiffMatrix\r\n\r\n\r\nclass Solution:\r\n def fieldOfGreatestBlessing(self, forceField: List[List[int]]) -> int:\r\n # 离散化\r\n allX, allY = set(), set()\r\n for x, y, side in forceField:\r\n allX.add(2 * x - side)\r\n allX.add(2 * x + side)\r\n allY.add(2 * y - side)\r\n allY.add(2 * y + side)\r\n sortedX = sorted(allX)\r\n sortedY = sorted(allY)\r\n rankX = {x: i for i, x in enumerate(sortedX)}\r\n rankY = {y: i for i, y in enumerate(sortedY)}\r\n\r\n # 二维差分\r\n row, col = len(sortedX), len(sortedY)\r\n diffMatrix = DiffMatrix([[0] * col for _ in range(row)])\r\n for x, y, side in forceField:\r\n r1, c1 = rankX[2 * x - side], rankY[2 * y - side]\r\n r2, c2 = rankX[2 * x + side], rankY[2 * y + side]\r\n diffMatrix.add(r1, c1, r2, c2, 1)\r\n\r\n diffMatrix.update()\r\n\r\n res = 0\r\n for i in range(row):\r\n for j in range(col):\r\n res = max(res, diffMatrix.query(i, j))\r\n return res\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app.config.from_object('config.DevelopmentConfig')
<|reserved_special_token_0|>
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app.config.from_object('config.DevelopmentConfig')
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
<|reserved_special_token_1|>
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from server import app, db
app.config.from_object('config.DevelopmentConfig')
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# @Author: Marcela Campo
# @Date: 2016-05-06 18:56:47
# @Last Modified by: Marcela Campo
# @Last Modified time: 2016-05-06 19:03:21
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from server import app, db
app.config.from_object('config.DevelopmentConfig')
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
flexible
|
{
"blob_id": "d7b91b0476a1f2e00408ce1f1501bf98d4c06e4e",
"index": 9540,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp.config.from_object('config.DevelopmentConfig')\n<mask token>\nmanager.add_command('db', MigrateCommand)\nif __name__ == '__main__':\n manager.run()\n",
"step-3": "<mask token>\napp.config.from_object('config.DevelopmentConfig')\nmigrate = Migrate(app, db)\nmanager = Manager(app)\nmanager.add_command('db', MigrateCommand)\nif __name__ == '__main__':\n manager.run()\n",
"step-4": "import os\nfrom flask.ext.script import Manager\nfrom flask.ext.migrate import Migrate, MigrateCommand\nfrom server import app, db\napp.config.from_object('config.DevelopmentConfig')\nmigrate = Migrate(app, db)\nmanager = Manager(app)\nmanager.add_command('db', MigrateCommand)\nif __name__ == '__main__':\n manager.run()\n",
"step-5": "# -*- coding: utf-8 -*-\n# @Author: Marcela Campo\n# @Date: 2016-05-06 18:56:47\n# @Last Modified by: Marcela Campo\n# @Last Modified time: 2016-05-06 19:03:21\nimport os\nfrom flask.ext.script import Manager\nfrom flask.ext.migrate import Migrate, MigrateCommand\n\nfrom server import app, db\n\n\napp.config.from_object('config.DevelopmentConfig')\n\nmigrate = Migrate(app, db)\nmanager = Manager(app)\n\nmanager.add_command('db', MigrateCommand)\n\n\nif __name__ == '__main__':\n manager.run()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def lcs(X, Y, m, n):
dp = [([0] * (n + 1)) for i in range(m + 1)]
for i in range(1, m + 1):
for j in range(1, n + 1):
if X[i - 1] == Y[j - 1]:
dp[i][j] = 1 + dp[i - 1][j - 1]
else:
dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
index = dp[m][n]
s = ''
i = m
j = n
while i > 0 and j > 0:
if X[i - 1] == Y[j - 1]:
s += X[i - 1]
i -= 1
j -= 1
elif dp[i - 1][j] > dp[i][j - 1]:
i -= 1
else:
j -= 1
return s
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def lcs(X, Y, m, n):
dp = [([0] * (n + 1)) for i in range(m + 1)]
for i in range(1, m + 1):
for j in range(1, n + 1):
if X[i - 1] == Y[j - 1]:
dp[i][j] = 1 + dp[i - 1][j - 1]
else:
dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
index = dp[m][n]
s = ''
i = m
j = n
while i > 0 and j > 0:
if X[i - 1] == Y[j - 1]:
s += X[i - 1]
i -= 1
j -= 1
elif dp[i - 1][j] > dp[i][j - 1]:
i -= 1
else:
j -= 1
return s
<|reserved_special_token_0|>
print('Length of LCS is ', lcs(X, Y, len(X), len(Y)))
<|reserved_special_token_1|>
def lcs(X, Y, m, n):
dp = [([0] * (n + 1)) for i in range(m + 1)]
for i in range(1, m + 1):
for j in range(1, n + 1):
if X[i - 1] == Y[j - 1]:
dp[i][j] = 1 + dp[i - 1][j - 1]
else:
dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
index = dp[m][n]
s = ''
i = m
j = n
while i > 0 and j > 0:
if X[i - 1] == Y[j - 1]:
s += X[i - 1]
i -= 1
j -= 1
elif dp[i - 1][j] > dp[i][j - 1]:
i -= 1
else:
j -= 1
return s
X = 'AGGTAB'
Y = 'GXTXAYB'
print('Length of LCS is ', lcs(X, Y, len(X), len(Y)))
<|reserved_special_token_1|>
def lcs(X, Y, m, n):
dp = [[0]*(n+1) for i in range(m+1)]
for i in range(1,m+1):
for j in range(1,n+1):
if X[i-1] == Y[j-1]:
dp[i][j] = 1 + dp[i-1][j-1]
else:
dp[i][j] = max(dp[i-1][j], dp[i][j-1])
index = dp[m][n]
s = ""
i = m
j = n
while i > 0 and j > 0:
if X[i-1] == Y[j-1]:
s += X[i-1]
i -= 1
j -= 1
elif dp[i-1][j] > dp[i][j-1]:
i -= 1
else:
j -= 1
return s
X = "AGGTAB"
Y = "GXTXAYB"
print("Length of LCS is ", lcs(X , Y, len(X), len(Y)))
|
flexible
|
{
"blob_id": "247e352b7772a1da74a26f007228355f5af8d3b3",
"index": 191,
"step-1": "<mask token>\n",
"step-2": "def lcs(X, Y, m, n):\n dp = [([0] * (n + 1)) for i in range(m + 1)]\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n if X[i - 1] == Y[j - 1]:\n dp[i][j] = 1 + dp[i - 1][j - 1]\n else:\n dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n index = dp[m][n]\n s = ''\n i = m\n j = n\n while i > 0 and j > 0:\n if X[i - 1] == Y[j - 1]:\n s += X[i - 1]\n i -= 1\n j -= 1\n elif dp[i - 1][j] > dp[i][j - 1]:\n i -= 1\n else:\n j -= 1\n return s\n\n\n<mask token>\n",
"step-3": "def lcs(X, Y, m, n):\n dp = [([0] * (n + 1)) for i in range(m + 1)]\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n if X[i - 1] == Y[j - 1]:\n dp[i][j] = 1 + dp[i - 1][j - 1]\n else:\n dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n index = dp[m][n]\n s = ''\n i = m\n j = n\n while i > 0 and j > 0:\n if X[i - 1] == Y[j - 1]:\n s += X[i - 1]\n i -= 1\n j -= 1\n elif dp[i - 1][j] > dp[i][j - 1]:\n i -= 1\n else:\n j -= 1\n return s\n\n\n<mask token>\nprint('Length of LCS is ', lcs(X, Y, len(X), len(Y)))\n",
"step-4": "def lcs(X, Y, m, n):\n dp = [([0] * (n + 1)) for i in range(m + 1)]\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n if X[i - 1] == Y[j - 1]:\n dp[i][j] = 1 + dp[i - 1][j - 1]\n else:\n dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n index = dp[m][n]\n s = ''\n i = m\n j = n\n while i > 0 and j > 0:\n if X[i - 1] == Y[j - 1]:\n s += X[i - 1]\n i -= 1\n j -= 1\n elif dp[i - 1][j] > dp[i][j - 1]:\n i -= 1\n else:\n j -= 1\n return s\n\n\nX = 'AGGTAB'\nY = 'GXTXAYB'\nprint('Length of LCS is ', lcs(X, Y, len(X), len(Y)))\n",
"step-5": "def lcs(X, Y, m, n):\r\n dp = [[0]*(n+1) for i in range(m+1)]\r\n\r\n for i in range(1,m+1):\r\n for j in range(1,n+1):\r\n\r\n if X[i-1] == Y[j-1]:\r\n dp[i][j] = 1 + dp[i-1][j-1]\r\n else:\r\n dp[i][j] = max(dp[i-1][j], dp[i][j-1])\r\n\r\n index = dp[m][n]\r\n s = \"\"\r\n\r\n i = m\r\n j = n\r\n while i > 0 and j > 0:\r\n if X[i-1] == Y[j-1]:\r\n s += X[i-1]\r\n i -= 1\r\n j -= 1\r\n elif dp[i-1][j] > dp[i][j-1]:\r\n i -= 1\r\n else:\r\n j -= 1\r\n\r\n return s\r\n\r\nX = \"AGGTAB\"\r\nY = \"GXTXAYB\"\r\nprint(\"Length of LCS is \", lcs(X , Y, len(X), len(Y)))\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from typing import Any, Optional
from aiogram import types
from aiogram.dispatcher.middlewares import BaseMiddleware
from scene_manager.loader.loader import Loader
from scene_manager.utils import content_type_checker
class ScenesMiddleware(BaseMiddleware):
def __init__(self, *, loader: Optional[Loader] = None, default_scene_name: Optional[str] = None):
self._default_scene_name = default_scene_name or "start"
self._loader = loader or Loader.get_current()
if self._loader is None:
self._loader = Loader()
if not self._loader.is_scenes_loaded:
self._loader.load_scenes()
self._storage = self._loader.data_storage
super().__init__()
async def on_post_process_message(self, message: types.Message, results: tuple, data: dict):
if data:
return
user_scene_name = await self._get_scene_name(message)
for scene_model in self._loader.handlers_storage.get_message_scene(user_scene_name):
if content_type_checker(message, scene_model.config.get("content_types")):
await scene_model.handler(message)
else:
otherwise_handler = scene_model.config.get("otherwise_handler")
if otherwise_handler is not None:
await otherwise_handler(message)
async def on_post_process_callback_query(
self, callback_query: types.CallbackQuery, results: tuple, data: dict
):
if data:
return
user_scene_name = await self._get_scene_name(callback_query)
for scene_model in self._loader.handlers_storage.get_callback_query_scene(user_scene_name):
await scene_model.handler(callback_query)
async def _get_scene_name(self, ctx) -> Any:
user_id = ctx.from_user.id
user_scene = await self._storage.get(user_id)
if user_scene is None:
await self._storage.put(user_id, self._default_scene_name)
user_scene = self._default_scene_name
return user_scene
|
normal
|
{
"blob_id": "11db76cba3dd76cad0d660a0e189d3e4c465071b",
"index": 8836,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ScenesMiddleware(BaseMiddleware):\n <mask token>\n\n async def on_post_process_message(self, message: types.Message, results:\n tuple, data: dict):\n if data:\n return\n user_scene_name = await self._get_scene_name(message)\n for scene_model in self._loader.handlers_storage.get_message_scene(\n user_scene_name):\n if content_type_checker(message, scene_model.config.get(\n 'content_types')):\n await scene_model.handler(message)\n else:\n otherwise_handler = scene_model.config.get('otherwise_handler')\n if otherwise_handler is not None:\n await otherwise_handler(message)\n\n async def on_post_process_callback_query(self, callback_query: types.\n CallbackQuery, results: tuple, data: dict):\n if data:\n return\n user_scene_name = await self._get_scene_name(callback_query)\n for scene_model in self._loader.handlers_storage.get_callback_query_scene(\n user_scene_name):\n await scene_model.handler(callback_query)\n\n async def _get_scene_name(self, ctx) ->Any:\n user_id = ctx.from_user.id\n user_scene = await self._storage.get(user_id)\n if user_scene is None:\n await self._storage.put(user_id, self._default_scene_name)\n user_scene = self._default_scene_name\n return user_scene\n",
"step-3": "<mask token>\n\n\nclass ScenesMiddleware(BaseMiddleware):\n\n def __init__(self, *, loader: Optional[Loader]=None, default_scene_name:\n Optional[str]=None):\n self._default_scene_name = default_scene_name or 'start'\n self._loader = loader or Loader.get_current()\n if self._loader is None:\n self._loader = Loader()\n if not self._loader.is_scenes_loaded:\n self._loader.load_scenes()\n self._storage = self._loader.data_storage\n super().__init__()\n\n async def on_post_process_message(self, message: types.Message, results:\n tuple, data: dict):\n if data:\n return\n user_scene_name = await self._get_scene_name(message)\n for scene_model in self._loader.handlers_storage.get_message_scene(\n user_scene_name):\n if content_type_checker(message, scene_model.config.get(\n 'content_types')):\n await scene_model.handler(message)\n else:\n otherwise_handler = scene_model.config.get('otherwise_handler')\n if otherwise_handler is not None:\n await otherwise_handler(message)\n\n async def on_post_process_callback_query(self, callback_query: types.\n CallbackQuery, results: tuple, data: dict):\n if data:\n return\n user_scene_name = await self._get_scene_name(callback_query)\n for scene_model in self._loader.handlers_storage.get_callback_query_scene(\n user_scene_name):\n await scene_model.handler(callback_query)\n\n async def _get_scene_name(self, ctx) ->Any:\n user_id = ctx.from_user.id\n user_scene = await self._storage.get(user_id)\n if user_scene is None:\n await self._storage.put(user_id, self._default_scene_name)\n user_scene = self._default_scene_name\n return user_scene\n",
"step-4": "from typing import Any, Optional\nfrom aiogram import types\nfrom aiogram.dispatcher.middlewares import BaseMiddleware\nfrom scene_manager.loader.loader import Loader\nfrom scene_manager.utils import content_type_checker\n\n\nclass ScenesMiddleware(BaseMiddleware):\n\n def __init__(self, *, loader: Optional[Loader]=None, default_scene_name:\n Optional[str]=None):\n self._default_scene_name = default_scene_name or 'start'\n self._loader = loader or Loader.get_current()\n if self._loader is None:\n self._loader = Loader()\n if not self._loader.is_scenes_loaded:\n self._loader.load_scenes()\n self._storage = self._loader.data_storage\n super().__init__()\n\n async def on_post_process_message(self, message: types.Message, results:\n tuple, data: dict):\n if data:\n return\n user_scene_name = await self._get_scene_name(message)\n for scene_model in self._loader.handlers_storage.get_message_scene(\n user_scene_name):\n if content_type_checker(message, scene_model.config.get(\n 'content_types')):\n await scene_model.handler(message)\n else:\n otherwise_handler = scene_model.config.get('otherwise_handler')\n if otherwise_handler is not None:\n await otherwise_handler(message)\n\n async def on_post_process_callback_query(self, callback_query: types.\n CallbackQuery, results: tuple, data: dict):\n if data:\n return\n user_scene_name = await self._get_scene_name(callback_query)\n for scene_model in self._loader.handlers_storage.get_callback_query_scene(\n user_scene_name):\n await scene_model.handler(callback_query)\n\n async def _get_scene_name(self, ctx) ->Any:\n user_id = ctx.from_user.id\n user_scene = await self._storage.get(user_id)\n if user_scene is None:\n await self._storage.put(user_id, self._default_scene_name)\n user_scene = self._default_scene_name\n return user_scene\n",
"step-5": "from typing import Any, Optional\n\nfrom aiogram import types\nfrom aiogram.dispatcher.middlewares import BaseMiddleware\n\nfrom scene_manager.loader.loader import Loader\nfrom scene_manager.utils import content_type_checker\n\n\nclass ScenesMiddleware(BaseMiddleware):\n def __init__(self, *, loader: Optional[Loader] = None, default_scene_name: Optional[str] = None):\n self._default_scene_name = default_scene_name or \"start\"\n self._loader = loader or Loader.get_current()\n if self._loader is None:\n self._loader = Loader()\n if not self._loader.is_scenes_loaded:\n self._loader.load_scenes()\n self._storage = self._loader.data_storage\n super().__init__()\n\n async def on_post_process_message(self, message: types.Message, results: tuple, data: dict):\n if data:\n return\n user_scene_name = await self._get_scene_name(message)\n for scene_model in self._loader.handlers_storage.get_message_scene(user_scene_name):\n if content_type_checker(message, scene_model.config.get(\"content_types\")):\n await scene_model.handler(message)\n else:\n otherwise_handler = scene_model.config.get(\"otherwise_handler\")\n if otherwise_handler is not None:\n await otherwise_handler(message)\n\n async def on_post_process_callback_query(\n self, callback_query: types.CallbackQuery, results: tuple, data: dict\n ):\n if data:\n return\n user_scene_name = await self._get_scene_name(callback_query)\n for scene_model in self._loader.handlers_storage.get_callback_query_scene(user_scene_name):\n await scene_model.handler(callback_query)\n\n async def _get_scene_name(self, ctx) -> Any:\n user_id = ctx.from_user.id\n user_scene = await self._storage.get(user_id)\n if user_scene is None:\n await self._storage.put(user_id, self._default_scene_name)\n user_scene = self._default_scene_name\n return user_scene\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import requests
rsp = requests.get(
'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s'
% ('wx27c0e6ef6a7f0716', '6e29e232daf462652f66ee8acc11838b'))
print(rsp.text)
|
normal
|
{
"blob_id": "d86fe165e378e56650e3b76bf3d0f72e2a50a023",
"index": 5082,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(rsp.text)\n",
"step-3": "<mask token>\nrsp = requests.get(\n 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s'\n % ('wx27c0e6ef6a7f0716', '6e29e232daf462652f66ee8acc11838b'))\nprint(rsp.text)\n",
"step-4": "import requests\nrsp = requests.get(\n 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s'\n % ('wx27c0e6ef6a7f0716', '6e29e232daf462652f66ee8acc11838b'))\nprint(rsp.text)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class ZipTools:
<|reserved_special_token_0|>
@staticmethod
def descomprimir(archivo, dir_extraer):
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.
EdcaMensajes.obt_mensaje(err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)
zip_ref.extractall(dir_extraer)
zip_ref.close()
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ZipTools:
<|reserved_special_token_0|>
@staticmethod
def descomprimir(archivo, dir_extraer):
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.
EdcaMensajes.obt_mensaje(err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)
zip_ref.extractall(dir_extraer)
zip_ref.close()
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
@staticmethod
def obtener_contenido_zip(archivo):
global zp
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
zp = contenido.filename
zip_ref.close()
return zp
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ZipTools:
@staticmethod
def comprimir(archivo, dir_comprimir):
__archivo_zip = archivo[:archivo.find('.')] + '.zip'
try:
with zipfile.ZipFile(__archivo_zip, 'w', zipfile.ZIP_DEFLATED
) as archivoZip:
archivoZip.write(archivo)
archivoZip.close()
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
@staticmethod
def descomprimir(archivo, dir_extraer):
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.
EdcaMensajes.obt_mensaje(err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)
zip_ref.extractall(dir_extraer)
zip_ref.close()
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
@staticmethod
def obtener_contenido_zip(archivo):
global zp
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
zp = contenido.filename
zip_ref.close()
return zp
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import zipfile
from edca_mensajes import EdcaErrores as err, EdcaMensajes as msg
from edca_logs.EdcaLogger import EdcaLogger as log
class ZipTools:
@staticmethod
def comprimir(archivo, dir_comprimir):
__archivo_zip = archivo[:archivo.find('.')] + '.zip'
try:
with zipfile.ZipFile(__archivo_zip, 'w', zipfile.ZIP_DEFLATED
) as archivoZip:
archivoZip.write(archivo)
archivoZip.close()
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
@staticmethod
def descomprimir(archivo, dir_extraer):
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.
EdcaMensajes.obt_mensaje(err.EdcaErrores.
INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)
zip_ref.extractall(dir_extraer)
zip_ref.close()
log.registrar_log_info(__name__, err.EdcaErrores.
INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
@staticmethod
def obtener_contenido_zip(archivo):
global zp
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
zp = contenido.filename
zip_ref.close()
return zp
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %
PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.
ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.
obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.
filename % IOError.strerror)
<|reserved_special_token_1|>
"""
PROYECTO : Portal EDCA-HN
NOMBRE : ZipTools
Descripcion : Clase utilitaria para descomprimir archivos ZIP.
MM/DD/YYYY Colaboradores Descripcion
05/07/2019 Alla Duenas Creacion.
"""
import zipfile
from edca_mensajes import EdcaErrores as err, EdcaMensajes as msg
from edca_logs.EdcaLogger import EdcaLogger as log
class ZipTools:
# Funcion para cromprimir los archivos descargados
@staticmethod
def comprimir(archivo, dir_comprimir):
__archivo_zip = archivo[:archivo.find(".")] + ".zip"
try:
with zipfile.ZipFile(__archivo_zip,'w', zipfile.ZIP_DEFLATED) as archivoZip:
archivoZip.write(archivo)
archivoZip.close()
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, "EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, "EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(
err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.filename % IOError.strerror)
# Funcion para descromprimir los archivos descargados
@staticmethod
def descomprimir(archivo, dir_extraer):
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
log.registrar_log_info(__name__, err.EdcaErrores.INFO_ZIPTOOL_PRINT_DIR,
"EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)
zip_ref.extractall(dir_extraer)
zip_ref.close()
log.registrar_log_info(__name__, err.EdcaErrores.INFO_ZIPTOOL_UNZIP, "EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, "EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, "EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(
err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.filename % IOError.strerror)
@staticmethod
def obtener_contenido_zip(archivo):
global zp
try:
zip_ref = zipfile.ZipFile(archivo, 'r')
zip_list = zip_ref.infolist()
for contenido in zip_list:
zp = contenido.filename
zip_ref.close()
return zp
except PermissionError:
log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, "EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP)
% PermissionError.filename % PermissionError.strerror)
except IOError:
log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, "EXTRAER ARCHIVO",
msg.EdcaMensajes.obt_mensaje(
err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.filename % IOError.strerror)
|
flexible
|
{
"blob_id": "1190e802fde6c2c6f48bd2720688bd9231b622e0",
"index": 6564,
"step-1": "<mask token>\n\n\nclass ZipTools:\n <mask token>\n\n @staticmethod\n def descomprimir(archivo, dir_extraer):\n try:\n zip_ref = zipfile.ZipFile(archivo, 'r')\n zip_list = zip_ref.infolist()\n for contenido in zip_list:\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.\n EdcaMensajes.obt_mensaje(err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)\n zip_ref.extractall(dir_extraer)\n zip_ref.close()\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ZipTools:\n <mask token>\n\n @staticmethod\n def descomprimir(archivo, dir_extraer):\n try:\n zip_ref = zipfile.ZipFile(archivo, 'r')\n zip_list = zip_ref.infolist()\n for contenido in zip_list:\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.\n EdcaMensajes.obt_mensaje(err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)\n zip_ref.extractall(dir_extraer)\n zip_ref.close()\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n\n @staticmethod\n def obtener_contenido_zip(archivo):\n global zp\n try:\n zip_ref = zipfile.ZipFile(archivo, 'r')\n zip_list = zip_ref.infolist()\n for contenido in zip_list:\n zp = contenido.filename\n zip_ref.close()\n return zp\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n",
"step-3": "<mask token>\n\n\nclass ZipTools:\n\n @staticmethod\n def comprimir(archivo, dir_comprimir):\n __archivo_zip = archivo[:archivo.find('.')] + '.zip'\n try:\n with zipfile.ZipFile(__archivo_zip, 'w', zipfile.ZIP_DEFLATED\n ) as archivoZip:\n archivoZip.write(archivo)\n archivoZip.close()\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n\n @staticmethod\n def descomprimir(archivo, dir_extraer):\n try:\n zip_ref = zipfile.ZipFile(archivo, 'r')\n zip_list = zip_ref.infolist()\n for contenido in zip_list:\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.\n EdcaMensajes.obt_mensaje(err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)\n zip_ref.extractall(dir_extraer)\n zip_ref.close()\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n\n @staticmethod\n def obtener_contenido_zip(archivo):\n global zp\n try:\n zip_ref = zipfile.ZipFile(archivo, 'r')\n zip_list = zip_ref.infolist()\n for contenido in zip_list:\n zp = 
contenido.filename\n zip_ref.close()\n return zp\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n",
"step-4": "<mask token>\nimport zipfile\nfrom edca_mensajes import EdcaErrores as err, EdcaMensajes as msg\nfrom edca_logs.EdcaLogger import EdcaLogger as log\n\n\nclass ZipTools:\n\n @staticmethod\n def comprimir(archivo, dir_comprimir):\n __archivo_zip = archivo[:archivo.find('.')] + '.zip'\n try:\n with zipfile.ZipFile(__archivo_zip, 'w', zipfile.ZIP_DEFLATED\n ) as archivoZip:\n archivoZip.write(archivo)\n archivoZip.close()\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n\n @staticmethod\n def descomprimir(archivo, dir_extraer):\n try:\n zip_ref = zipfile.ZipFile(archivo, 'r')\n zip_list = zip_ref.infolist()\n for contenido in zip_list:\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR, 'EXTRAER ARCHIVO', msg.\n EdcaMensajes.obt_mensaje(err.EdcaErrores.\n INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)\n zip_ref.extractall(dir_extraer)\n zip_ref.close()\n log.registrar_log_info(__name__, err.EdcaErrores.\n INFO_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n\n @staticmethod\n def obtener_contenido_zip(archivo):\n 
global zp\n try:\n zip_ref = zipfile.ZipFile(archivo, 'r')\n zip_list = zip_ref.infolist()\n for contenido in zip_list:\n zp = contenido.filename\n zip_ref.close()\n return zp\n except PermissionError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) %\n PermissionError.filename % PermissionError.strerror)\n except IOError:\n log.registrar_log_error(__name__, err.EdcaErrores.\n ERR_ZIPTOOL_UNZIP, 'EXTRAER ARCHIVO', msg.EdcaMensajes.\n obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.\n filename % IOError.strerror)\n",
"step-5": "\"\"\"\r\nPROYECTO : Portal EDCA-HN\r\nNOMBRE : ZipTools\r\nDescripcion : Clase utilitaria para descomprimir archivos ZIP.\r\n\r\nMM/DD/YYYY Colaboradores Descripcion\r\n05/07/2019 Alla Duenas Creacion. \r\n\"\"\"\r\n\r\nimport zipfile\r\nfrom edca_mensajes import EdcaErrores as err, EdcaMensajes as msg\r\nfrom edca_logs.EdcaLogger import EdcaLogger as log\r\n\r\n\r\nclass ZipTools:\r\n\r\n # Funcion para cromprimir los archivos descargados\r\n @staticmethod\r\n def comprimir(archivo, dir_comprimir):\r\n __archivo_zip = archivo[:archivo.find(\".\")] + \".zip\"\r\n try:\r\n with zipfile.ZipFile(__archivo_zip,'w', zipfile.ZIP_DEFLATED) as archivoZip:\r\n archivoZip.write(archivo)\r\n archivoZip.close()\r\n\r\n except PermissionError:\r\n log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, \"EXTRAER ARCHIVO\",\r\n msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % PermissionError.filename % PermissionError.strerror)\r\n except IOError:\r\n log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, \"EXTRAER ARCHIVO\",\r\n msg.EdcaMensajes.obt_mensaje(\r\n err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.filename % IOError.strerror)\r\n \r\n # Funcion para descromprimir los archivos descargados\r\n @staticmethod\r\n def descomprimir(archivo, dir_extraer):\r\n try:\r\n zip_ref = zipfile.ZipFile(archivo, 'r')\r\n zip_list = zip_ref.infolist()\r\n for contenido in zip_list:\r\n log.registrar_log_info(__name__, err.EdcaErrores.INFO_ZIPTOOL_PRINT_DIR,\r\n \"EXTRAER ARCHIVO\",\r\n msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_PRINT_DIR) % contenido.filename)\r\n zip_ref.extractall(dir_extraer)\r\n zip_ref.close()\r\n log.registrar_log_info(__name__, err.EdcaErrores.INFO_ZIPTOOL_UNZIP, \"EXTRAER ARCHIVO\",\r\n msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.INFO_ZIPTOOL_UNZIP))\r\n except PermissionError:\r\n log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, \"EXTRAER ARCHIVO\",\r\n 
msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % PermissionError.filename % PermissionError.strerror)\r\n except IOError:\r\n log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, \"EXTRAER ARCHIVO\",\r\n msg.EdcaMensajes.obt_mensaje(\r\n err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.filename % IOError.strerror)\r\n\r\n @staticmethod\r\n def obtener_contenido_zip(archivo):\r\n global zp\r\n try:\r\n zip_ref = zipfile.ZipFile(archivo, 'r')\r\n zip_list = zip_ref.infolist()\r\n for contenido in zip_list:\r\n zp = contenido.filename\r\n zip_ref.close()\r\n return zp\r\n except PermissionError:\r\n log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, \"EXTRAER ARCHIVO\",\r\n msg.EdcaMensajes.obt_mensaje(err.EdcaErrores.ERR_ZIPTOOL_UNZIP)\r\n % PermissionError.filename % PermissionError.strerror)\r\n except IOError:\r\n log.registrar_log_error(__name__, err.EdcaErrores.ERR_ZIPTOOL_UNZIP, \"EXTRAER ARCHIVO\",\r\n msg.EdcaMensajes.obt_mensaje(\r\n err.EdcaErrores.ERR_ZIPTOOL_UNZIP) % IOError.filename % IOError.strerror)\r\n\r\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from flask import escape
import pandas as pd
import json
import requests
with open('result.csv', newline='') as f:
df = pd.read_csv(f)
def get_level_diff(word, only_common=False):
if only_common:
word_df = df[(df['word']==word) & (df['common']==1)]
else:
word_df = df[df['word']==word]
return (word_df.values[0][3], word_df.values[0][8]) if len(word_df) > 0 else (None, None)
# order words based on either level or frequency.
def order_words(words, by=0, reverse=False, only_common=False):
if (by not in {0, 1}): raise Exception("by is either 0 (by level), 1 (by frequency)")
if (by == 1): reverse = not reverse
word_results = []
for word in words:
level, freq = get_level_diff(word, only_common=only_common)
if level != None:
if by == 0:
word_results.append((word, level))
else:
word_results.append((word, freq))
word_results.sort(key=lambda x : x[1], reverse=reverse)
return word_results
def translate_words(words, target):
key = "AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w"
words_string = ""
for word in words:
words_string += "&q="
words_string += word
url = f"https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}"
res = json.loads(requests.get(url).content)['data']['translations']
return [s['translatedText'] for s in res]
def hello_http(request):
request_args = request.args
#'words', 'lang-from', 'lang-to', 'by', 'reverse'
if request_args and 'words' in request_args:
words = json.loads(request_args['words'])
if isinstance(words, list) and len(words) > 0:
target = request_args.get('target', 'es')
by_str = request_args.get('by', 'level')
by = 1 if by_str == 'freq' else 0
reverse = request_args.get('reverse', 'false') == 'true'
only_common = request_args.get('only-common', 'false') == 'true'
results = order_words(words, by=by, reverse=reverse, only_common=only_common)
translated = translate_words([result[0] for result in results], target)
return json.dumps([[results[i][0], results[i][1], translated[i]] for i in range(len(results))])
else:
return "not list"
else:
return "error"
|
normal
|
{
"blob_id": "2f489a87e40bea979000dd429cc4cb0150ff4c3b",
"index": 908,
"step-1": "<mask token>\n\n\ndef get_level_diff(word, only_common=False):\n if only_common:\n word_df = df[(df['word'] == word) & (df['common'] == 1)]\n else:\n word_df = df[df['word'] == word]\n return (word_df.values[0][3], word_df.values[0][8]) if len(word_df\n ) > 0 else (None, None)\n\n\n<mask token>\n\n\ndef translate_words(words, target):\n key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'\n words_string = ''\n for word in words:\n words_string += '&q='\n words_string += word\n url = (\n f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'\n )\n res = json.loads(requests.get(url).content)['data']['translations']\n return [s['translatedText'] for s in res]\n\n\ndef hello_http(request):\n request_args = request.args\n if request_args and 'words' in request_args:\n words = json.loads(request_args['words'])\n if isinstance(words, list) and len(words) > 0:\n target = request_args.get('target', 'es')\n by_str = request_args.get('by', 'level')\n by = 1 if by_str == 'freq' else 0\n reverse = request_args.get('reverse', 'false') == 'true'\n only_common = request_args.get('only-common', 'false') == 'true'\n results = order_words(words, by=by, reverse=reverse,\n only_common=only_common)\n translated = translate_words([result[0] for result in results],\n target)\n return json.dumps([[results[i][0], results[i][1], translated[i]\n ] for i in range(len(results))])\n else:\n return 'not list'\n else:\n return 'error'\n",
"step-2": "<mask token>\n\n\ndef get_level_diff(word, only_common=False):\n if only_common:\n word_df = df[(df['word'] == word) & (df['common'] == 1)]\n else:\n word_df = df[df['word'] == word]\n return (word_df.values[0][3], word_df.values[0][8]) if len(word_df\n ) > 0 else (None, None)\n\n\ndef order_words(words, by=0, reverse=False, only_common=False):\n if by not in {0, 1}:\n raise Exception('by is either 0 (by level), 1 (by frequency)')\n if by == 1:\n reverse = not reverse\n word_results = []\n for word in words:\n level, freq = get_level_diff(word, only_common=only_common)\n if level != None:\n if by == 0:\n word_results.append((word, level))\n else:\n word_results.append((word, freq))\n word_results.sort(key=lambda x: x[1], reverse=reverse)\n return word_results\n\n\ndef translate_words(words, target):\n key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'\n words_string = ''\n for word in words:\n words_string += '&q='\n words_string += word\n url = (\n f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'\n )\n res = json.loads(requests.get(url).content)['data']['translations']\n return [s['translatedText'] for s in res]\n\n\ndef hello_http(request):\n request_args = request.args\n if request_args and 'words' in request_args:\n words = json.loads(request_args['words'])\n if isinstance(words, list) and len(words) > 0:\n target = request_args.get('target', 'es')\n by_str = request_args.get('by', 'level')\n by = 1 if by_str == 'freq' else 0\n reverse = request_args.get('reverse', 'false') == 'true'\n only_common = request_args.get('only-common', 'false') == 'true'\n results = order_words(words, by=by, reverse=reverse,\n only_common=only_common)\n translated = translate_words([result[0] for result in results],\n target)\n return json.dumps([[results[i][0], results[i][1], translated[i]\n ] for i in range(len(results))])\n else:\n return 'not list'\n else:\n return 'error'\n",
"step-3": "<mask token>\nwith open('result.csv', newline='') as f:\n df = pd.read_csv(f)\n\n\ndef get_level_diff(word, only_common=False):\n if only_common:\n word_df = df[(df['word'] == word) & (df['common'] == 1)]\n else:\n word_df = df[df['word'] == word]\n return (word_df.values[0][3], word_df.values[0][8]) if len(word_df\n ) > 0 else (None, None)\n\n\ndef order_words(words, by=0, reverse=False, only_common=False):\n if by not in {0, 1}:\n raise Exception('by is either 0 (by level), 1 (by frequency)')\n if by == 1:\n reverse = not reverse\n word_results = []\n for word in words:\n level, freq = get_level_diff(word, only_common=only_common)\n if level != None:\n if by == 0:\n word_results.append((word, level))\n else:\n word_results.append((word, freq))\n word_results.sort(key=lambda x: x[1], reverse=reverse)\n return word_results\n\n\ndef translate_words(words, target):\n key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'\n words_string = ''\n for word in words:\n words_string += '&q='\n words_string += word\n url = (\n f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'\n )\n res = json.loads(requests.get(url).content)['data']['translations']\n return [s['translatedText'] for s in res]\n\n\ndef hello_http(request):\n request_args = request.args\n if request_args and 'words' in request_args:\n words = json.loads(request_args['words'])\n if isinstance(words, list) and len(words) > 0:\n target = request_args.get('target', 'es')\n by_str = request_args.get('by', 'level')\n by = 1 if by_str == 'freq' else 0\n reverse = request_args.get('reverse', 'false') == 'true'\n only_common = request_args.get('only-common', 'false') == 'true'\n results = order_words(words, by=by, reverse=reverse,\n only_common=only_common)\n translated = translate_words([result[0] for result in results],\n target)\n return json.dumps([[results[i][0], results[i][1], translated[i]\n ] for i in range(len(results))])\n else:\n return 'not list'\n 
else:\n return 'error'\n",
"step-4": "from flask import escape\nimport pandas as pd\nimport json\nimport requests\nwith open('result.csv', newline='') as f:\n df = pd.read_csv(f)\n\n\ndef get_level_diff(word, only_common=False):\n if only_common:\n word_df = df[(df['word'] == word) & (df['common'] == 1)]\n else:\n word_df = df[df['word'] == word]\n return (word_df.values[0][3], word_df.values[0][8]) if len(word_df\n ) > 0 else (None, None)\n\n\ndef order_words(words, by=0, reverse=False, only_common=False):\n if by not in {0, 1}:\n raise Exception('by is either 0 (by level), 1 (by frequency)')\n if by == 1:\n reverse = not reverse\n word_results = []\n for word in words:\n level, freq = get_level_diff(word, only_common=only_common)\n if level != None:\n if by == 0:\n word_results.append((word, level))\n else:\n word_results.append((word, freq))\n word_results.sort(key=lambda x: x[1], reverse=reverse)\n return word_results\n\n\ndef translate_words(words, target):\n key = 'AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w'\n words_string = ''\n for word in words:\n words_string += '&q='\n words_string += word\n url = (\n f'https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}'\n )\n res = json.loads(requests.get(url).content)['data']['translations']\n return [s['translatedText'] for s in res]\n\n\ndef hello_http(request):\n request_args = request.args\n if request_args and 'words' in request_args:\n words = json.loads(request_args['words'])\n if isinstance(words, list) and len(words) > 0:\n target = request_args.get('target', 'es')\n by_str = request_args.get('by', 'level')\n by = 1 if by_str == 'freq' else 0\n reverse = request_args.get('reverse', 'false') == 'true'\n only_common = request_args.get('only-common', 'false') == 'true'\n results = order_words(words, by=by, reverse=reverse,\n only_common=only_common)\n translated = translate_words([result[0] for result in results],\n target)\n return json.dumps([[results[i][0], results[i][1], translated[i]\n ] for 
i in range(len(results))])\n else:\n return 'not list'\n else:\n return 'error'\n",
"step-5": "from flask import escape\nimport pandas as pd\nimport json\nimport requests\n\nwith open('result.csv', newline='') as f:\n df = pd.read_csv(f)\n\ndef get_level_diff(word, only_common=False):\n if only_common:\n word_df = df[(df['word']==word) & (df['common']==1)]\n else:\n word_df = df[df['word']==word]\n return (word_df.values[0][3], word_df.values[0][8]) if len(word_df) > 0 else (None, None)\n\n# order words based on either level or frequency. \ndef order_words(words, by=0, reverse=False, only_common=False):\n if (by not in {0, 1}): raise Exception(\"by is either 0 (by level), 1 (by frequency)\")\n if (by == 1): reverse = not reverse\n \n word_results = []\n for word in words:\n level, freq = get_level_diff(word, only_common=only_common)\n if level != None:\n if by == 0:\n word_results.append((word, level))\n else:\n word_results.append((word, freq))\n word_results.sort(key=lambda x : x[1], reverse=reverse)\n return word_results\n\ndef translate_words(words, target):\n key = \"AIzaSyCmB0XTpv7PBLGllUBGyTVZ8syJJz2rL-w\"\n words_string = \"\"\n for word in words:\n words_string += \"&q=\"\n words_string += word\n url = f\"https://translation.googleapis.com/language/translate/v2?target={target}&key={key}{words_string}\"\n res = json.loads(requests.get(url).content)['data']['translations']\n return [s['translatedText'] for s in res]\n\ndef hello_http(request):\n request_args = request.args\n\n #'words', 'lang-from', 'lang-to', 'by', 'reverse'\n\n if request_args and 'words' in request_args:\n words = json.loads(request_args['words'])\n if isinstance(words, list) and len(words) > 0:\n target = request_args.get('target', 'es')\n by_str = request_args.get('by', 'level')\n by = 1 if by_str == 'freq' else 0\n reverse = request_args.get('reverse', 'false') == 'true'\n only_common = request_args.get('only-common', 'false') == 'true'\n \n results = order_words(words, by=by, reverse=reverse, only_common=only_common)\n translated = translate_words([result[0] for 
result in results], target)\n return json.dumps([[results[i][0], results[i][1], translated[i]] for i in range(len(results))])\n else:\n return \"not list\"\n else:\n return \"error\"",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import matplotlib.pyplot as plt
import numpy as np
from sklearn.utils import shuffle
import math
import vis_utils
class FLAGS(object):
image_height = 100
image_width = 100
image_channel = 1
CORRECT_ORIENTATION = True
class PrepareData():
def __init__(self):
return
def sparse_tuple_from_label(self, sequences, dtype=np.int32):
"""Create a sparse representention of x.
Args:
sequences: a list of lists of type dtype where each element is a sequence
Returns:
A tuple with (indices, values, shape)
"""
indices = []
values = []
for n, seq in enumerate(sequences):
indices.extend(zip([n] * len(seq), range(len(seq))))
values.extend(seq)
indices = np.asarray(indices, dtype=np.int64)
values = np.asarray(values, dtype=dtype)
shape = np.asarray([len(sequences), np.asarray(indices).max(0)[1] + 1], dtype=np.int64)
return indices, values, shape
def preprocess_samples(self, samples):
batch_inputs = []
batch_labels = []
for sample in samples:
im,label = sample[:FLAGS.image_height * FLAGS.image_width], sample[FLAGS.image_height * FLAGS.image_width:]
label = label.astype(np.int32).tolist()
im = np.reshape(im, [FLAGS.image_height, FLAGS.image_width, FLAGS.image_channel])
batch_inputs.append(im)
batch_labels.append(label)
res = [batch_inputs]
if self.prepare_get_sparselabel:
res.append(self.sparse_tuple_from_label(batch_labels))
if self.parepare_get_denselabel:
res.append(batch_labels)
return res
def __generator(self, samples, batch_size,is_training=True):
num_samples = len(samples)
while 1: # Loop forever so the generator never terminates
if is_training:
#during traning, shuffle the whole samples at the beginningof the epoch
samples = shuffle(samples)
for offset in range(0, num_samples, batch_size):
if is_training and (offset+batch_size > num_samples -1 ):
# this is to make sure all the batch are of same sizes during training
continue
batch_samples = samples[offset:offset+batch_size]
yield self.preprocess_samples(batch_samples)
def get_samples(self, split_name):
mnist_sequence = "./data/mnist_sequence3_sample_8distortions_9x9.npz"
data = np.load(mnist_sequence)
x_train, y_train = data['X_train'].reshape((-1, FLAGS.image_height * FLAGS.image_width)), data['y_train']
x_valid, y_valid = data['X_valid'].reshape((-1, FLAGS.image_height * FLAGS.image_width)), data['y_valid']
x_test, y_test = data['X_test'].reshape((-1, FLAGS.image_height * FLAGS.image_width)), data['y_test']
if split_name == "train":
res = np.concatenate([x_train, y_train], axis=1)
elif split_name == "sample_test":
res = np.concatenate([x_train[:100], y_train[:100]], axis=1)
elif split_name == "eval":
res = np.concatenate([x_valid, y_valid], axis=1)
else:
res = np.concatenate([x_test, y_test], axis=1)
return res
def input_batch_generator(self, split_name, is_training=False, batch_size=32, get_filenames = False, get_sparselabel = True, get_denselabel = True):
samples = self.get_samples(split_name)
self.prepare_get_filenames = get_filenames
self.prepare_get_sparselabel = get_sparselabel
self.parepare_get_denselabel = get_denselabel
gen = self.__generator(samples, batch_size, is_training=is_training)
return gen, len(samples)
def run(self):
batch_size = 32
split_name = 'sample_test'
# split_name = 'train'
# split_name = 'eval'
generator, dataset_size = self.input_batch_generator(split_name, is_training=True, batch_size=batch_size, get_filenames=True,get_sparselabel = True)
num_batches_per_epoch = int(math.ceil(dataset_size / float(batch_size)))
for _ in range(num_batches_per_epoch):
batch_inputs, batch_labels_sparse, batch_labels = next(generator)
batch_inputs = np.array(batch_inputs)
print(batch_labels)
print("batch_size={}".format(len(batch_labels)))
vis = True
if vis:
grid = vis_utils.visualize_grid(batch_inputs[:4])
grid = np.squeeze(grid)
plt.imshow(grid, cmap='gray')
plt.show()
break
return
if __name__ == "__main__":
obj= PrepareData()
obj.run()
|
normal
|
{
"blob_id": "315fe68f4adf39ded46fa9ad059fd2e962e46437",
"index": 8533,
"step-1": "<mask token>\n\n\nclass PrepareData:\n\n def __init__(self):\n return\n\n def sparse_tuple_from_label(self, sequences, dtype=np.int32):\n \"\"\"Create a sparse representention of x.\n Args:\n sequences: a list of lists of type dtype where each element is a sequence\n Returns:\n A tuple with (indices, values, shape)\n \"\"\"\n indices = []\n values = []\n for n, seq in enumerate(sequences):\n indices.extend(zip([n] * len(seq), range(len(seq))))\n values.extend(seq)\n indices = np.asarray(indices, dtype=np.int64)\n values = np.asarray(values, dtype=dtype)\n shape = np.asarray([len(sequences), np.asarray(indices).max(0)[1] +\n 1], dtype=np.int64)\n return indices, values, shape\n\n def preprocess_samples(self, samples):\n batch_inputs = []\n batch_labels = []\n for sample in samples:\n im, label = sample[:FLAGS.image_height * FLAGS.image_width\n ], sample[FLAGS.image_height * FLAGS.image_width:]\n label = label.astype(np.int32).tolist()\n im = np.reshape(im, [FLAGS.image_height, FLAGS.image_width,\n FLAGS.image_channel])\n batch_inputs.append(im)\n batch_labels.append(label)\n res = [batch_inputs]\n if self.prepare_get_sparselabel:\n res.append(self.sparse_tuple_from_label(batch_labels))\n if self.parepare_get_denselabel:\n res.append(batch_labels)\n return res\n\n def __generator(self, samples, batch_size, is_training=True):\n num_samples = len(samples)\n while 1:\n if is_training:\n samples = shuffle(samples)\n for offset in range(0, num_samples, batch_size):\n if is_training and offset + batch_size > num_samples - 1:\n continue\n batch_samples = samples[offset:offset + batch_size]\n yield self.preprocess_samples(batch_samples)\n <mask token>\n\n def input_batch_generator(self, split_name, is_training=False,\n batch_size=32, get_filenames=False, get_sparselabel=True,\n get_denselabel=True):\n samples = self.get_samples(split_name)\n self.prepare_get_filenames = get_filenames\n self.prepare_get_sparselabel = get_sparselabel\n self.parepare_get_denselabel = 
get_denselabel\n gen = self.__generator(samples, batch_size, is_training=is_training)\n return gen, len(samples)\n\n def run(self):\n batch_size = 32\n split_name = 'sample_test'\n generator, dataset_size = self.input_batch_generator(split_name,\n is_training=True, batch_size=batch_size, get_filenames=True,\n get_sparselabel=True)\n num_batches_per_epoch = int(math.ceil(dataset_size / float(batch_size))\n )\n for _ in range(num_batches_per_epoch):\n batch_inputs, batch_labels_sparse, batch_labels = next(generator)\n batch_inputs = np.array(batch_inputs)\n print(batch_labels)\n print('batch_size={}'.format(len(batch_labels)))\n vis = True\n if vis:\n grid = vis_utils.visualize_grid(batch_inputs[:4])\n grid = np.squeeze(grid)\n plt.imshow(grid, cmap='gray')\n plt.show()\n break\n return\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass PrepareData:\n\n def __init__(self):\n return\n\n def sparse_tuple_from_label(self, sequences, dtype=np.int32):\n \"\"\"Create a sparse representention of x.\n Args:\n sequences: a list of lists of type dtype where each element is a sequence\n Returns:\n A tuple with (indices, values, shape)\n \"\"\"\n indices = []\n values = []\n for n, seq in enumerate(sequences):\n indices.extend(zip([n] * len(seq), range(len(seq))))\n values.extend(seq)\n indices = np.asarray(indices, dtype=np.int64)\n values = np.asarray(values, dtype=dtype)\n shape = np.asarray([len(sequences), np.asarray(indices).max(0)[1] +\n 1], dtype=np.int64)\n return indices, values, shape\n\n def preprocess_samples(self, samples):\n batch_inputs = []\n batch_labels = []\n for sample in samples:\n im, label = sample[:FLAGS.image_height * FLAGS.image_width\n ], sample[FLAGS.image_height * FLAGS.image_width:]\n label = label.astype(np.int32).tolist()\n im = np.reshape(im, [FLAGS.image_height, FLAGS.image_width,\n FLAGS.image_channel])\n batch_inputs.append(im)\n batch_labels.append(label)\n res = [batch_inputs]\n if self.prepare_get_sparselabel:\n res.append(self.sparse_tuple_from_label(batch_labels))\n if self.parepare_get_denselabel:\n res.append(batch_labels)\n return res\n\n def __generator(self, samples, batch_size, is_training=True):\n num_samples = len(samples)\n while 1:\n if is_training:\n samples = shuffle(samples)\n for offset in range(0, num_samples, batch_size):\n if is_training and offset + batch_size > num_samples - 1:\n continue\n batch_samples = samples[offset:offset + batch_size]\n yield self.preprocess_samples(batch_samples)\n\n def get_samples(self, split_name):\n mnist_sequence = './data/mnist_sequence3_sample_8distortions_9x9.npz'\n data = np.load(mnist_sequence)\n x_train, y_train = data['X_train'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), data['y_train']\n x_valid, y_valid = data['X_valid'].reshape((-1, FLAGS.image_height *\n 
FLAGS.image_width)), data['y_valid']\n x_test, y_test = data['X_test'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), data['y_test']\n if split_name == 'train':\n res = np.concatenate([x_train, y_train], axis=1)\n elif split_name == 'sample_test':\n res = np.concatenate([x_train[:100], y_train[:100]], axis=1)\n elif split_name == 'eval':\n res = np.concatenate([x_valid, y_valid], axis=1)\n else:\n res = np.concatenate([x_test, y_test], axis=1)\n return res\n\n def input_batch_generator(self, split_name, is_training=False,\n batch_size=32, get_filenames=False, get_sparselabel=True,\n get_denselabel=True):\n samples = self.get_samples(split_name)\n self.prepare_get_filenames = get_filenames\n self.prepare_get_sparselabel = get_sparselabel\n self.parepare_get_denselabel = get_denselabel\n gen = self.__generator(samples, batch_size, is_training=is_training)\n return gen, len(samples)\n\n def run(self):\n batch_size = 32\n split_name = 'sample_test'\n generator, dataset_size = self.input_batch_generator(split_name,\n is_training=True, batch_size=batch_size, get_filenames=True,\n get_sparselabel=True)\n num_batches_per_epoch = int(math.ceil(dataset_size / float(batch_size))\n )\n for _ in range(num_batches_per_epoch):\n batch_inputs, batch_labels_sparse, batch_labels = next(generator)\n batch_inputs = np.array(batch_inputs)\n print(batch_labels)\n print('batch_size={}'.format(len(batch_labels)))\n vis = True\n if vis:\n grid = vis_utils.visualize_grid(batch_inputs[:4])\n grid = np.squeeze(grid)\n plt.imshow(grid, cmap='gray')\n plt.show()\n break\n return\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass FLAGS(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass PrepareData:\n\n def __init__(self):\n return\n\n def sparse_tuple_from_label(self, sequences, dtype=np.int32):\n \"\"\"Create a sparse representention of x.\n Args:\n sequences: a list of lists of type dtype where each element is a sequence\n Returns:\n A tuple with (indices, values, shape)\n \"\"\"\n indices = []\n values = []\n for n, seq in enumerate(sequences):\n indices.extend(zip([n] * len(seq), range(len(seq))))\n values.extend(seq)\n indices = np.asarray(indices, dtype=np.int64)\n values = np.asarray(values, dtype=dtype)\n shape = np.asarray([len(sequences), np.asarray(indices).max(0)[1] +\n 1], dtype=np.int64)\n return indices, values, shape\n\n def preprocess_samples(self, samples):\n batch_inputs = []\n batch_labels = []\n for sample in samples:\n im, label = sample[:FLAGS.image_height * FLAGS.image_width\n ], sample[FLAGS.image_height * FLAGS.image_width:]\n label = label.astype(np.int32).tolist()\n im = np.reshape(im, [FLAGS.image_height, FLAGS.image_width,\n FLAGS.image_channel])\n batch_inputs.append(im)\n batch_labels.append(label)\n res = [batch_inputs]\n if self.prepare_get_sparselabel:\n res.append(self.sparse_tuple_from_label(batch_labels))\n if self.parepare_get_denselabel:\n res.append(batch_labels)\n return res\n\n def __generator(self, samples, batch_size, is_training=True):\n num_samples = len(samples)\n while 1:\n if is_training:\n samples = shuffle(samples)\n for offset in range(0, num_samples, batch_size):\n if is_training and offset + batch_size > num_samples - 1:\n continue\n batch_samples = samples[offset:offset + batch_size]\n yield self.preprocess_samples(batch_samples)\n\n def get_samples(self, split_name):\n mnist_sequence = './data/mnist_sequence3_sample_8distortions_9x9.npz'\n data = np.load(mnist_sequence)\n x_train, y_train = data['X_train'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), 
data['y_train']\n x_valid, y_valid = data['X_valid'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), data['y_valid']\n x_test, y_test = data['X_test'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), data['y_test']\n if split_name == 'train':\n res = np.concatenate([x_train, y_train], axis=1)\n elif split_name == 'sample_test':\n res = np.concatenate([x_train[:100], y_train[:100]], axis=1)\n elif split_name == 'eval':\n res = np.concatenate([x_valid, y_valid], axis=1)\n else:\n res = np.concatenate([x_test, y_test], axis=1)\n return res\n\n def input_batch_generator(self, split_name, is_training=False,\n batch_size=32, get_filenames=False, get_sparselabel=True,\n get_denselabel=True):\n samples = self.get_samples(split_name)\n self.prepare_get_filenames = get_filenames\n self.prepare_get_sparselabel = get_sparselabel\n self.parepare_get_denselabel = get_denselabel\n gen = self.__generator(samples, batch_size, is_training=is_training)\n return gen, len(samples)\n\n def run(self):\n batch_size = 32\n split_name = 'sample_test'\n generator, dataset_size = self.input_batch_generator(split_name,\n is_training=True, batch_size=batch_size, get_filenames=True,\n get_sparselabel=True)\n num_batches_per_epoch = int(math.ceil(dataset_size / float(batch_size))\n )\n for _ in range(num_batches_per_epoch):\n batch_inputs, batch_labels_sparse, batch_labels = next(generator)\n batch_inputs = np.array(batch_inputs)\n print(batch_labels)\n print('batch_size={}'.format(len(batch_labels)))\n vis = True\n if vis:\n grid = vis_utils.visualize_grid(batch_inputs[:4])\n grid = np.squeeze(grid)\n plt.imshow(grid, cmap='gray')\n plt.show()\n break\n return\n\n\n<mask token>\n",
"step-4": "import matplotlib.pyplot as plt\nimport numpy as np\nfrom sklearn.utils import shuffle\nimport math\nimport vis_utils\n\n\nclass FLAGS(object):\n image_height = 100\n image_width = 100\n image_channel = 1\n CORRECT_ORIENTATION = True\n\n\nclass PrepareData:\n\n def __init__(self):\n return\n\n def sparse_tuple_from_label(self, sequences, dtype=np.int32):\n \"\"\"Create a sparse representention of x.\n Args:\n sequences: a list of lists of type dtype where each element is a sequence\n Returns:\n A tuple with (indices, values, shape)\n \"\"\"\n indices = []\n values = []\n for n, seq in enumerate(sequences):\n indices.extend(zip([n] * len(seq), range(len(seq))))\n values.extend(seq)\n indices = np.asarray(indices, dtype=np.int64)\n values = np.asarray(values, dtype=dtype)\n shape = np.asarray([len(sequences), np.asarray(indices).max(0)[1] +\n 1], dtype=np.int64)\n return indices, values, shape\n\n def preprocess_samples(self, samples):\n batch_inputs = []\n batch_labels = []\n for sample in samples:\n im, label = sample[:FLAGS.image_height * FLAGS.image_width\n ], sample[FLAGS.image_height * FLAGS.image_width:]\n label = label.astype(np.int32).tolist()\n im = np.reshape(im, [FLAGS.image_height, FLAGS.image_width,\n FLAGS.image_channel])\n batch_inputs.append(im)\n batch_labels.append(label)\n res = [batch_inputs]\n if self.prepare_get_sparselabel:\n res.append(self.sparse_tuple_from_label(batch_labels))\n if self.parepare_get_denselabel:\n res.append(batch_labels)\n return res\n\n def __generator(self, samples, batch_size, is_training=True):\n num_samples = len(samples)\n while 1:\n if is_training:\n samples = shuffle(samples)\n for offset in range(0, num_samples, batch_size):\n if is_training and offset + batch_size > num_samples - 1:\n continue\n batch_samples = samples[offset:offset + batch_size]\n yield self.preprocess_samples(batch_samples)\n\n def get_samples(self, split_name):\n mnist_sequence = './data/mnist_sequence3_sample_8distortions_9x9.npz'\n 
data = np.load(mnist_sequence)\n x_train, y_train = data['X_train'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), data['y_train']\n x_valid, y_valid = data['X_valid'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), data['y_valid']\n x_test, y_test = data['X_test'].reshape((-1, FLAGS.image_height *\n FLAGS.image_width)), data['y_test']\n if split_name == 'train':\n res = np.concatenate([x_train, y_train], axis=1)\n elif split_name == 'sample_test':\n res = np.concatenate([x_train[:100], y_train[:100]], axis=1)\n elif split_name == 'eval':\n res = np.concatenate([x_valid, y_valid], axis=1)\n else:\n res = np.concatenate([x_test, y_test], axis=1)\n return res\n\n def input_batch_generator(self, split_name, is_training=False,\n batch_size=32, get_filenames=False, get_sparselabel=True,\n get_denselabel=True):\n samples = self.get_samples(split_name)\n self.prepare_get_filenames = get_filenames\n self.prepare_get_sparselabel = get_sparselabel\n self.parepare_get_denselabel = get_denselabel\n gen = self.__generator(samples, batch_size, is_training=is_training)\n return gen, len(samples)\n\n def run(self):\n batch_size = 32\n split_name = 'sample_test'\n generator, dataset_size = self.input_batch_generator(split_name,\n is_training=True, batch_size=batch_size, get_filenames=True,\n get_sparselabel=True)\n num_batches_per_epoch = int(math.ceil(dataset_size / float(batch_size))\n )\n for _ in range(num_batches_per_epoch):\n batch_inputs, batch_labels_sparse, batch_labels = next(generator)\n batch_inputs = np.array(batch_inputs)\n print(batch_labels)\n print('batch_size={}'.format(len(batch_labels)))\n vis = True\n if vis:\n grid = vis_utils.visualize_grid(batch_inputs[:4])\n grid = np.squeeze(grid)\n plt.imshow(grid, cmap='gray')\n plt.show()\n break\n return\n\n\nif __name__ == '__main__':\n obj = PrepareData()\n obj.run()\n",
"step-5": "import matplotlib.pyplot as plt\nimport numpy as np\nfrom sklearn.utils import shuffle\nimport math\nimport vis_utils\n\n\nclass FLAGS(object):\n image_height = 100\n image_width = 100\n image_channel = 1\n \n CORRECT_ORIENTATION = True\n \n \n \nclass PrepareData():\n def __init__(self):\n \n return\n def sparse_tuple_from_label(self, sequences, dtype=np.int32):\n \"\"\"Create a sparse representention of x.\n Args:\n sequences: a list of lists of type dtype where each element is a sequence\n Returns:\n A tuple with (indices, values, shape)\n \"\"\"\n indices = []\n values = []\n \n for n, seq in enumerate(sequences):\n indices.extend(zip([n] * len(seq), range(len(seq))))\n values.extend(seq)\n \n indices = np.asarray(indices, dtype=np.int64)\n values = np.asarray(values, dtype=dtype)\n shape = np.asarray([len(sequences), np.asarray(indices).max(0)[1] + 1], dtype=np.int64)\n \n return indices, values, shape\n def preprocess_samples(self, samples):\n batch_inputs = []\n batch_labels = []\n\n for sample in samples:\n im,label = sample[:FLAGS.image_height * FLAGS.image_width], sample[FLAGS.image_height * FLAGS.image_width:]\n label = label.astype(np.int32).tolist()\n im = np.reshape(im, [FLAGS.image_height, FLAGS.image_width, FLAGS.image_channel])\n batch_inputs.append(im)\n batch_labels.append(label)\n \n \n res = [batch_inputs]\n if self.prepare_get_sparselabel:\n res.append(self.sparse_tuple_from_label(batch_labels))\n if self.parepare_get_denselabel:\n res.append(batch_labels)\n \n return res\n \n def __generator(self, samples, batch_size,is_training=True):\n num_samples = len(samples)\n while 1: # Loop forever so the generator never terminates\n if is_training:\n #during traning, shuffle the whole samples at the beginningof the epoch\n samples = shuffle(samples)\n for offset in range(0, num_samples, batch_size):\n if is_training and (offset+batch_size > num_samples -1 ):\n # this is to make sure all the batch are of same sizes during training\n 
continue\n batch_samples = samples[offset:offset+batch_size]\n yield self.preprocess_samples(batch_samples)\n\n \n def get_samples(self, split_name):\n mnist_sequence = \"./data/mnist_sequence3_sample_8distortions_9x9.npz\"\n data = np.load(mnist_sequence)\n\n x_train, y_train = data['X_train'].reshape((-1, FLAGS.image_height * FLAGS.image_width)), data['y_train']\n x_valid, y_valid = data['X_valid'].reshape((-1, FLAGS.image_height * FLAGS.image_width)), data['y_valid']\n x_test, y_test = data['X_test'].reshape((-1, FLAGS.image_height * FLAGS.image_width)), data['y_test']\n \n if split_name == \"train\":\n res = np.concatenate([x_train, y_train], axis=1)\n elif split_name == \"sample_test\":\n res = np.concatenate([x_train[:100], y_train[:100]], axis=1)\n elif split_name == \"eval\":\n res = np.concatenate([x_valid, y_valid], axis=1)\n else:\n res = np.concatenate([x_test, y_test], axis=1)\n\n return res\n \n def input_batch_generator(self, split_name, is_training=False, batch_size=32, get_filenames = False, get_sparselabel = True, get_denselabel = True):\n samples = self.get_samples(split_name) \n self.prepare_get_filenames = get_filenames\n self.prepare_get_sparselabel = get_sparselabel\n self.parepare_get_denselabel = get_denselabel\n gen = self.__generator(samples, batch_size, is_training=is_training)\n \n return gen, len(samples)\n \n def run(self):\n\n batch_size = 32\n split_name = 'sample_test'\n# split_name = 'train'\n# split_name = 'eval'\n generator, dataset_size = self.input_batch_generator(split_name, is_training=True, batch_size=batch_size, get_filenames=True,get_sparselabel = True)\n num_batches_per_epoch = int(math.ceil(dataset_size / float(batch_size)))\n for _ in range(num_batches_per_epoch):\n batch_inputs, batch_labels_sparse, batch_labels = next(generator)\n batch_inputs = np.array(batch_inputs)\n print(batch_labels)\n print(\"batch_size={}\".format(len(batch_labels)))\n vis = True\n if vis:\n grid = vis_utils.visualize_grid(batch_inputs[:4])\n 
grid = np.squeeze(grid)\n plt.imshow(grid, cmap='gray')\n plt.show()\n break\n \n return\n \n \n\n\nif __name__ == \"__main__\": \n obj= PrepareData()\n obj.run()",
"step-ids": [
7,
8,
9,
12,
13
]
}
|
[
7,
8,
9,
12,
13
] |
<|reserved_special_token_0|>
class Ninja:
def __init__(self, first_name, last_name, treats, pet_food, pet):
self.first_name = first_name
self.last_name = last_name
self.treats = treats
self.pet_food = pet_food
self.pet = pet
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def bathe(self):
self.pet.noise()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ninja:
def __init__(self, first_name, last_name, treats, pet_food, pet):
self.first_name = first_name
self.last_name = last_name
self.treats = treats
self.pet_food = pet_food
self.pet = pet
def walk(self):
self.pet.play()
def feed(self):
self.pet.eat()
def bathe(self):
self.pet.noise()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ninja:
def __init__(self, first_name, last_name, treats, pet_food, pet):
self.first_name = first_name
self.last_name = last_name
self.treats = treats
self.pet_food = pet_food
self.pet = pet
def walk(self):
self.pet.play()
def feed(self):
self.pet.eat()
def bathe(self):
self.pet.noise()
<|reserved_special_token_0|>
Naruto.feed()
print(Naruto.pet.energy)
print(Naruto.pet.health)
Naruto.bathe()
Naruto.walk()
print(Naruto.pet.energy)
print(Naruto.pet.health)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ninja:
def __init__(self, first_name, last_name, treats, pet_food, pet):
self.first_name = first_name
self.last_name = last_name
self.treats = treats
self.pet_food = pet_food
self.pet = pet
def walk(self):
self.pet.play()
def feed(self):
self.pet.eat()
def bathe(self):
self.pet.noise()
Fox = Pet('Ninetailed Fox', 'Fox', 'Fire-Breathing')
Naruto = Ninja('Naruto', 'Izumaki', 'Rice Balls', 'Ground Beef', Fox)
Naruto.feed()
print(Naruto.pet.energy)
print(Naruto.pet.health)
Naruto.bathe()
Naruto.walk()
print(Naruto.pet.energy)
print(Naruto.pet.health)
<|reserved_special_token_1|>
from pet import Pet
class Ninja:
def __init__(self, first_name, last_name, treats, pet_food, pet):
self.first_name = first_name
self.last_name = last_name
self.treats = treats
self.pet_food = pet_food
self.pet = pet
def walk(self):
self.pet.play()
def feed(self):
self.pet.eat()
def bathe(self):
self.pet.noise()
Fox = Pet("Ninetailed Fox", "Fox", "Fire-Breathing")
Naruto = Ninja("Naruto", "Izumaki", "Rice Balls", "Ground Beef", Fox)
Naruto.feed()
print(Naruto.pet.energy)
print(Naruto.pet.health)
Naruto.bathe()
Naruto.walk()
print(Naruto.pet.energy)
print(Naruto.pet.health)
|
flexible
|
{
"blob_id": "b210784a198eaa3e57b5a65ec182a746aecc0e2b",
"index": 1695,
"step-1": "<mask token>\n\n\nclass Ninja:\n\n def __init__(self, first_name, last_name, treats, pet_food, pet):\n self.first_name = first_name\n self.last_name = last_name\n self.treats = treats\n self.pet_food = pet_food\n self.pet = pet\n <mask token>\n <mask token>\n\n def bathe(self):\n self.pet.noise()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Ninja:\n\n def __init__(self, first_name, last_name, treats, pet_food, pet):\n self.first_name = first_name\n self.last_name = last_name\n self.treats = treats\n self.pet_food = pet_food\n self.pet = pet\n\n def walk(self):\n self.pet.play()\n\n def feed(self):\n self.pet.eat()\n\n def bathe(self):\n self.pet.noise()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Ninja:\n\n def __init__(self, first_name, last_name, treats, pet_food, pet):\n self.first_name = first_name\n self.last_name = last_name\n self.treats = treats\n self.pet_food = pet_food\n self.pet = pet\n\n def walk(self):\n self.pet.play()\n\n def feed(self):\n self.pet.eat()\n\n def bathe(self):\n self.pet.noise()\n\n\n<mask token>\nNaruto.feed()\nprint(Naruto.pet.energy)\nprint(Naruto.pet.health)\nNaruto.bathe()\nNaruto.walk()\nprint(Naruto.pet.energy)\nprint(Naruto.pet.health)\n",
"step-4": "<mask token>\n\n\nclass Ninja:\n\n def __init__(self, first_name, last_name, treats, pet_food, pet):\n self.first_name = first_name\n self.last_name = last_name\n self.treats = treats\n self.pet_food = pet_food\n self.pet = pet\n\n def walk(self):\n self.pet.play()\n\n def feed(self):\n self.pet.eat()\n\n def bathe(self):\n self.pet.noise()\n\n\nFox = Pet('Ninetailed Fox', 'Fox', 'Fire-Breathing')\nNaruto = Ninja('Naruto', 'Izumaki', 'Rice Balls', 'Ground Beef', Fox)\nNaruto.feed()\nprint(Naruto.pet.energy)\nprint(Naruto.pet.health)\nNaruto.bathe()\nNaruto.walk()\nprint(Naruto.pet.energy)\nprint(Naruto.pet.health)\n",
"step-5": "from pet import Pet \n\nclass Ninja:\n def __init__(self, first_name, last_name, treats, pet_food, pet):\n self.first_name = first_name\n self.last_name = last_name\n self.treats = treats\n self.pet_food = pet_food\n self.pet = pet\n\n\n def walk(self):\n self.pet.play()\n\n\n def feed(self):\n self.pet.eat()\n\n\n def bathe(self):\n self.pet.noise()\n\n\n\nFox = Pet(\"Ninetailed Fox\", \"Fox\", \"Fire-Breathing\")\nNaruto = Ninja(\"Naruto\", \"Izumaki\", \"Rice Balls\", \"Ground Beef\", Fox)\n\n\nNaruto.feed()\nprint(Naruto.pet.energy)\nprint(Naruto.pet.health)\nNaruto.bathe()\nNaruto.walk()\nprint(Naruto.pet.energy)\nprint(Naruto.pet.health)",
"step-ids": [
3,
5,
6,
7,
9
]
}
|
[
3,
5,
6,
7,
9
] |
import basevcstest
class TestVCSBoxfill(basevcstest.VCSBaseTest):
def testRobinsonBoxfill(self):
# This tests if extending the longitude to more than 360 decrees is handled correctly by
# proj4. See https://github.com/UV-CDAT/uvcdat/issues/1728 for more
# information.
clt3 = self.clt('clt', latitude=(-90.0, 90.0), squeeze=1,
longitude=(-180, 200.0), time=('1979-01', '1988-12'),)
gmBoxfill = self.x.getboxfill('a_robinson_boxfill')
kwargs = {}
kwargs['cdmsfile'] = self.clt.id
kwargs['bg'] = self.bg
self.x.plot(clt3, gmBoxfill, **kwargs)
self.checkImage("test_vcs_boxfill_robinson_wrap.png")
|
normal
|
{
"blob_id": "c1475209d9c9a98d72d7f703e0516aceaeb13163",
"index": 6820,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestVCSBoxfill(basevcstest.VCSBaseTest):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TestVCSBoxfill(basevcstest.VCSBaseTest):\n\n def testRobinsonBoxfill(self):\n clt3 = self.clt('clt', latitude=(-90.0, 90.0), squeeze=1, longitude\n =(-180, 200.0), time=('1979-01', '1988-12'))\n gmBoxfill = self.x.getboxfill('a_robinson_boxfill')\n kwargs = {}\n kwargs['cdmsfile'] = self.clt.id\n kwargs['bg'] = self.bg\n self.x.plot(clt3, gmBoxfill, **kwargs)\n self.checkImage('test_vcs_boxfill_robinson_wrap.png')\n",
"step-4": "import basevcstest\n\n\nclass TestVCSBoxfill(basevcstest.VCSBaseTest):\n\n def testRobinsonBoxfill(self):\n clt3 = self.clt('clt', latitude=(-90.0, 90.0), squeeze=1, longitude\n =(-180, 200.0), time=('1979-01', '1988-12'))\n gmBoxfill = self.x.getboxfill('a_robinson_boxfill')\n kwargs = {}\n kwargs['cdmsfile'] = self.clt.id\n kwargs['bg'] = self.bg\n self.x.plot(clt3, gmBoxfill, **kwargs)\n self.checkImage('test_vcs_boxfill_robinson_wrap.png')\n",
"step-5": "import basevcstest\n\n\nclass TestVCSBoxfill(basevcstest.VCSBaseTest):\n def testRobinsonBoxfill(self):\n # This tests if extending the longitude to more than 360 decrees is handled correctly by\n # proj4. See https://github.com/UV-CDAT/uvcdat/issues/1728 for more\n # information.\n clt3 = self.clt('clt', latitude=(-90.0, 90.0), squeeze=1,\n longitude=(-180, 200.0), time=('1979-01', '1988-12'),)\n gmBoxfill = self.x.getboxfill('a_robinson_boxfill')\n kwargs = {}\n kwargs['cdmsfile'] = self.clt.id\n kwargs['bg'] = self.bg\n self.x.plot(clt3, gmBoxfill, **kwargs)\n self.checkImage(\"test_vcs_boxfill_robinson_wrap.png\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
start = input()
user_list = start.split()
if user_list[-1] == 'wolf':
print('Please go away and stop eating my sheep')
else:
user_list.reverse()
print(f'Oi! Sheep number {user_list.index("wolf,") }! You are about to be eaten by a wolf!')
|
normal
|
{
"blob_id": "16850d931eec0356f71317cc24461e006fbcd59c",
"index": 6192,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif user_list[-1] == 'wolf':\n print('Please go away and stop eating my sheep')\nelse:\n user_list.reverse()\n print(\n f\"Oi! Sheep number {user_list.index('wolf,')}! You are about to be eaten by a wolf!\"\n )\n",
"step-3": "start = input()\nuser_list = start.split()\nif user_list[-1] == 'wolf':\n print('Please go away and stop eating my sheep')\nelse:\n user_list.reverse()\n print(\n f\"Oi! Sheep number {user_list.index('wolf,')}! You are about to be eaten by a wolf!\"\n )\n",
"step-4": "start = input()\n\nuser_list = start.split()\n\nif user_list[-1] == 'wolf':\n print('Please go away and stop eating my sheep')\nelse:\n user_list.reverse()\n print(f'Oi! Sheep number {user_list.index(\"wolf,\") }! You are about to be eaten by a wolf!')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('index', '0003_auto_20200330_0444')]
operations = [migrations.AlterField(model_name='information', name=
'comment', field=models.CharField(blank=True, max_length=200, null=
True)), migrations.AlterField(model_name='information', name=
'picture', field=models.ImageField(blank=True, null=True, upload_to
='images/')), migrations.AlterField(model_name='myclass', name=
'day', field=models.CharField(blank=True, max_length=1, null=True)),
migrations.AlterField(model_name='myclass', name='period', field=
models.CharField(blank=True, max_length=10, null=True)), migrations
.AlterField(model_name='myclass', name='place', field=models.
CharField(blank=True, max_length=50, null=True))]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('index', '0003_auto_20200330_0444')]
operations = [migrations.AlterField(model_name='information', name=
'comment', field=models.CharField(blank=True, max_length=200, null=
True)), migrations.AlterField(model_name='information', name=
'picture', field=models.ImageField(blank=True, null=True, upload_to
='images/')), migrations.AlterField(model_name='myclass', name=
'day', field=models.CharField(blank=True, max_length=1, null=True)),
migrations.AlterField(model_name='myclass', name='period', field=
models.CharField(blank=True, max_length=10, null=True)), migrations
.AlterField(model_name='myclass', name='place', field=models.
CharField(blank=True, max_length=50, null=True))]
<|reserved_special_token_1|>
# Generated by Django 3.0.4 on 2020-03-29 19:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('index', '0003_auto_20200330_0444'),
]
operations = [
migrations.AlterField(
model_name='information',
name='comment',
field=models.CharField(blank=True, max_length=200, null=True),
),
migrations.AlterField(
model_name='information',
name='picture',
field=models.ImageField(blank=True, null=True, upload_to='images/'),
),
migrations.AlterField(
model_name='myclass',
name='day',
field=models.CharField(blank=True, max_length=1, null=True),
),
migrations.AlterField(
model_name='myclass',
name='period',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='myclass',
name='place',
field=models.CharField(blank=True, max_length=50, null=True),
),
]
|
flexible
|
{
"blob_id": "72c1226d40b3cdce29ef28493344c3cf68892149",
"index": 6001,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('index', '0003_auto_20200330_0444')]\n operations = [migrations.AlterField(model_name='information', name=\n 'comment', field=models.CharField(blank=True, max_length=200, null=\n True)), migrations.AlterField(model_name='information', name=\n 'picture', field=models.ImageField(blank=True, null=True, upload_to\n ='images/')), migrations.AlterField(model_name='myclass', name=\n 'day', field=models.CharField(blank=True, max_length=1, null=True)),\n migrations.AlterField(model_name='myclass', name='period', field=\n models.CharField(blank=True, max_length=10, null=True)), migrations\n .AlterField(model_name='myclass', name='place', field=models.\n CharField(blank=True, max_length=50, null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('index', '0003_auto_20200330_0444')]\n operations = [migrations.AlterField(model_name='information', name=\n 'comment', field=models.CharField(blank=True, max_length=200, null=\n True)), migrations.AlterField(model_name='information', name=\n 'picture', field=models.ImageField(blank=True, null=True, upload_to\n ='images/')), migrations.AlterField(model_name='myclass', name=\n 'day', field=models.CharField(blank=True, max_length=1, null=True)),\n migrations.AlterField(model_name='myclass', name='period', field=\n models.CharField(blank=True, max_length=10, null=True)), migrations\n .AlterField(model_name='myclass', name='place', field=models.\n CharField(blank=True, max_length=50, null=True))]\n",
"step-5": "# Generated by Django 3.0.4 on 2020-03-29 19:51\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('index', '0003_auto_20200330_0444'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='information',\n name='comment',\n field=models.CharField(blank=True, max_length=200, null=True),\n ),\n migrations.AlterField(\n model_name='information',\n name='picture',\n field=models.ImageField(blank=True, null=True, upload_to='images/'),\n ),\n migrations.AlterField(\n model_name='myclass',\n name='day',\n field=models.CharField(blank=True, max_length=1, null=True),\n ),\n migrations.AlterField(\n model_name='myclass',\n name='period',\n field=models.CharField(blank=True, max_length=10, null=True),\n ),\n migrations.AlterField(\n model_name='myclass',\n name='place',\n field=models.CharField(blank=True, max_length=50, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
dataset_path = 'data/output/dataset{toReplace}.csv'
dataset_path_final = 'data/output/final/datasetFinal.csv'
log_path = 'data/logs/output_append.log'
numberOfThreads = 45
inputFileMalign = 'data/input/malign/all.log'
outputFileMalign = 'data/output/fileMalign.csv'
sampleMalign = 300
inputFileBenignAAAA = 'data/input/benign/aaaa/all.log'
outputFileBenignAAA = 'data/output/fileBenignAAAA.csv'
sampleAAAA = 100
inputFileBenignCNAME = 'data/input/benign/cname/all.log'
outputFileBenignCNAME = 'data/output/fileBenignCNAME.csv'
sampleCNAME = 100
inputFileBenignMX = 'data/input/benign/mx/all.log'
outputFileBenignMX = 'data/output/fileBenignMX.csv'
sampleMX = 100
alexaDbPath = 'utils/Database/AlexaDB/top-1m.csv'
ports = [80, 443, 21, 22, 23, 25, 53, 110, 143, 161, 445, 465, 587, 993,
995, 3306, 3389, 7547, 8080, 8888]
fileHeader = (
'Domain,DNSRecordType,MXDnsResponse,TXTDnsResponse,HasSPFInfo,HasDkimInfo,HasDmarcInfo,Ip,DomainInAlexaDB,CommonPorts,CountryCode,RegisteredCountry,CreationDate,LastUpdateDate,ASN,HttpResponseCode,RegisteredOrg,SubdomainNumber,Entropy,EntropyOfSubDomains,StrangeCharacters,TLD,IpReputation,DomainReputation,ConsoantRatio,NumericRatio,SpecialCharRatio,VowelRatio,ConsoantSequence,VowelSequence,NumericSequence,SpecialCharSequence,DomainLength,Class'
)
headerRegex = """%s,%s,%d,%d,%d,%d,%d,%s,%d,%d,%s,%s,%d,%d,%d,%d,%s,%d,%d,%d,%d,%s,%d,%d,%0.1f,%0.1f,%0.1f,%0.1f,%d,%d,%d,%d,%d,%d
"""
sublist3rEngines = 'bing,passivedns'
<|reserved_special_token_1|>
# Copyright (C) 2020 Claudio Marques - All Rights Reserved
dataset_path = "data/output/dataset{toReplace}.csv"
dataset_path_final = "data/output/final/datasetFinal.csv"
log_path = "data/logs/output_append.log"
numberOfThreads = 45
inputFileMalign = "data/input/malign/all.log"
outputFileMalign = "data/output/fileMalign.csv"
sampleMalign = 300
inputFileBenignAAAA = "data/input/benign/aaaa/all.log"
outputFileBenignAAA = "data/output/fileBenignAAAA.csv"
sampleAAAA = 100
inputFileBenignCNAME = "data/input/benign/cname/all.log"
outputFileBenignCNAME = "data/output/fileBenignCNAME.csv"
sampleCNAME = 100
inputFileBenignMX = "data/input/benign/mx/all.log"
outputFileBenignMX = "data/output/fileBenignMX.csv"
sampleMX = 100
alexaDbPath = "utils/Database/AlexaDB/top-1m.csv"
ports = [80, 443, 21, 22, 23, 25, 53, 110, 143, 161, 445, 465, 587, 993, 995, 3306, 3389, 7547, 8080, 8888]
fileHeader = "Domain,DNSRecordType,MXDnsResponse,TXTDnsResponse,HasSPFInfo,HasDkimInfo,HasDmarcInfo,Ip,DomainInAlexaDB,CommonPorts,CountryCode,RegisteredCountry,CreationDate," \
"LastUpdateDate,ASN,HttpResponseCode,RegisteredOrg,SubdomainNumber,Entropy,EntropyOfSubDomains,StrangeCharacters," \
"TLD,IpReputation,DomainReputation," \
"ConsoantRatio,NumericRatio,SpecialCharRatio,VowelRatio,ConsoantSequence,VowelSequence,NumericSequence,SpecialCharSequence,DomainLength,Class"
headerRegex = "%s,%s,%d,%d,%d,%d,%d,%s,%d,%d,%s,%s,%d," \
"%d,%d,%d,%s,%d,%d,%d,%d," \
"%s,%d,%d," \
"%0.1f,%0.1f,%0.1f,%0.1f,%d,%d,%d,%d,%d,%d\n"
sublist3rEngines = "bing,passivedns"
|
flexible
|
{
"blob_id": "305133d4840741bd5c318a99a96660d8988dd61a",
"index": 7772,
"step-1": "<mask token>\n",
"step-2": "dataset_path = 'data/output/dataset{toReplace}.csv'\ndataset_path_final = 'data/output/final/datasetFinal.csv'\nlog_path = 'data/logs/output_append.log'\nnumberOfThreads = 45\ninputFileMalign = 'data/input/malign/all.log'\noutputFileMalign = 'data/output/fileMalign.csv'\nsampleMalign = 300\ninputFileBenignAAAA = 'data/input/benign/aaaa/all.log'\noutputFileBenignAAA = 'data/output/fileBenignAAAA.csv'\nsampleAAAA = 100\ninputFileBenignCNAME = 'data/input/benign/cname/all.log'\noutputFileBenignCNAME = 'data/output/fileBenignCNAME.csv'\nsampleCNAME = 100\ninputFileBenignMX = 'data/input/benign/mx/all.log'\noutputFileBenignMX = 'data/output/fileBenignMX.csv'\nsampleMX = 100\nalexaDbPath = 'utils/Database/AlexaDB/top-1m.csv'\nports = [80, 443, 21, 22, 23, 25, 53, 110, 143, 161, 445, 465, 587, 993, \n 995, 3306, 3389, 7547, 8080, 8888]\nfileHeader = (\n 'Domain,DNSRecordType,MXDnsResponse,TXTDnsResponse,HasSPFInfo,HasDkimInfo,HasDmarcInfo,Ip,DomainInAlexaDB,CommonPorts,CountryCode,RegisteredCountry,CreationDate,LastUpdateDate,ASN,HttpResponseCode,RegisteredOrg,SubdomainNumber,Entropy,EntropyOfSubDomains,StrangeCharacters,TLD,IpReputation,DomainReputation,ConsoantRatio,NumericRatio,SpecialCharRatio,VowelRatio,ConsoantSequence,VowelSequence,NumericSequence,SpecialCharSequence,DomainLength,Class'\n )\nheaderRegex = \"\"\"%s,%s,%d,%d,%d,%d,%d,%s,%d,%d,%s,%s,%d,%d,%d,%d,%s,%d,%d,%d,%d,%s,%d,%d,%0.1f,%0.1f,%0.1f,%0.1f,%d,%d,%d,%d,%d,%d\n\"\"\"\nsublist3rEngines = 'bing,passivedns'\n",
"step-3": "# Copyright (C) 2020 Claudio Marques - All Rights Reserved\r\ndataset_path = \"data/output/dataset{toReplace}.csv\"\r\ndataset_path_final = \"data/output/final/datasetFinal.csv\"\r\nlog_path = \"data/logs/output_append.log\"\r\nnumberOfThreads = 45\r\n\r\ninputFileMalign = \"data/input/malign/all.log\"\r\noutputFileMalign = \"data/output/fileMalign.csv\"\r\nsampleMalign = 300\r\n\r\ninputFileBenignAAAA = \"data/input/benign/aaaa/all.log\"\r\noutputFileBenignAAA = \"data/output/fileBenignAAAA.csv\"\r\nsampleAAAA = 100\r\n\r\ninputFileBenignCNAME = \"data/input/benign/cname/all.log\"\r\noutputFileBenignCNAME = \"data/output/fileBenignCNAME.csv\"\r\nsampleCNAME = 100\r\n\r\ninputFileBenignMX = \"data/input/benign/mx/all.log\"\r\noutputFileBenignMX = \"data/output/fileBenignMX.csv\"\r\nsampleMX = 100\r\n\r\nalexaDbPath = \"utils/Database/AlexaDB/top-1m.csv\"\r\n\r\nports = [80, 443, 21, 22, 23, 25, 53, 110, 143, 161, 445, 465, 587, 993, 995, 3306, 3389, 7547, 8080, 8888]\r\n\r\nfileHeader = \"Domain,DNSRecordType,MXDnsResponse,TXTDnsResponse,HasSPFInfo,HasDkimInfo,HasDmarcInfo,Ip,DomainInAlexaDB,CommonPorts,CountryCode,RegisteredCountry,CreationDate,\" \\\r\n \"LastUpdateDate,ASN,HttpResponseCode,RegisteredOrg,SubdomainNumber,Entropy,EntropyOfSubDomains,StrangeCharacters,\" \\\r\n \"TLD,IpReputation,DomainReputation,\" \\\r\n \"ConsoantRatio,NumericRatio,SpecialCharRatio,VowelRatio,ConsoantSequence,VowelSequence,NumericSequence,SpecialCharSequence,DomainLength,Class\"\r\n\r\nheaderRegex = \"%s,%s,%d,%d,%d,%d,%d,%s,%d,%d,%s,%s,%d,\" \\\r\n \"%d,%d,%d,%s,%d,%d,%d,%d,\" \\\r\n \"%s,%d,%d,\" \\\r\n \"%0.1f,%0.1f,%0.1f,%0.1f,%d,%d,%d,%d,%d,%d\\n\"\r\n\r\nsublist3rEngines = \"bing,passivedns\"\r\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
km=float(input())
cg=float(input())
print(round(km/cg,3),"km/l")
|
normal
|
{
"blob_id": "db33f7386d1eacbfbfd29aa367df310c557ae864",
"index": 8520,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(round(km / cg, 3), 'km/l')\n",
"step-3": "km = float(input())\ncg = float(input())\nprint(round(km / cg, 3), 'km/l')\n",
"step-4": "km=float(input())\ncg=float(input())\nprint(round(km/cg,3),\"km/l\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import setuptools
setuptools.setup(name='cppersist', install_requires=['Eve'])
|
normal
|
{
"blob_id": "4f1956b34ac3b55b2d40220b79816c139b4a2f5c",
"index": 9574,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetuptools.setup(name='cppersist', install_requires=['Eve'])\n",
"step-3": "import setuptools\nsetuptools.setup(name='cppersist', install_requires=['Eve'])\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from angrytux.model.game_objects.obstacle_states.HittedState import HittedState
from angrytux.model.game_objects.obstacle_states.ObstacleState import ObstacleState
class NewState(ObstacleState):
@property
def delete(self) ->bool:
"""
Don't delete this obstacle
:return: False
"""
return False
def hit(self) ->None:
"""
Just remove hit points of obstacle and change state
"""
self._obstacle.hit_points -= 1
self._obstacle.state = HittedState(self._obstacle)
|
normal
|
{
"blob_id": "7d21e76383b80e8a4433fb11cb3b64efee7a6d3b",
"index": 7008,
"step-1": "<mask token>\n\n\nclass NewState(ObstacleState):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass NewState(ObstacleState):\n <mask token>\n\n def hit(self) ->None:\n \"\"\"\n Just remove hit points of obstacle and change state\n \"\"\"\n self._obstacle.hit_points -= 1\n self._obstacle.state = HittedState(self._obstacle)\n",
"step-3": "<mask token>\n\n\nclass NewState(ObstacleState):\n\n @property\n def delete(self) ->bool:\n \"\"\"\n Don't delete this obstacle\n :return: False\n \"\"\"\n return False\n\n def hit(self) ->None:\n \"\"\"\n Just remove hit points of obstacle and change state\n \"\"\"\n self._obstacle.hit_points -= 1\n self._obstacle.state = HittedState(self._obstacle)\n",
"step-4": "from angrytux.model.game_objects.obstacle_states.HittedState import HittedState\nfrom angrytux.model.game_objects.obstacle_states.ObstacleState import ObstacleState\n\n\nclass NewState(ObstacleState):\n\n @property\n def delete(self) ->bool:\n \"\"\"\n Don't delete this obstacle\n :return: False\n \"\"\"\n return False\n\n def hit(self) ->None:\n \"\"\"\n Just remove hit points of obstacle and change state\n \"\"\"\n self._obstacle.hit_points -= 1\n self._obstacle.state = HittedState(self._obstacle)\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.