index
int64 0
100k
| blob_id
stringlengths 40
40
| code
stringlengths 7
7.27M
| steps
listlengths 1
1.25k
| error
bool 2
classes |
---|---|---|---|---|
99,000 |
6b1460651808b1e255ec97add7d14257088328fa
|
# modules to create diagrames
import flask
import io
import json
from flask import send_file, render_template
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
import bokeh
from bokeh.plotting import figure
from bokeh.resources import CDN
from bokeh.embed import file_html
from bokeh.embed import json_item
from bokeh.palettes import Spectral6
from bokeh.transform import factor_cmap
from bokeh.sampledata.iris import flowers
import pandas as pd
from util import df_from_sql
uri_prefix = '/diagrams'
diagrams = flask.Blueprint('diagrams', __name__)
colormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}
colors = [colormap[x] for x in flowers['species']]
def make_plot(x, y):
p = figure(title = "Iris Morphology", sizing_mode="fixed", plot_width=800, plot_height=400)
p.xaxis.axis_label = x
p.yaxis.axis_label = y
p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)
return p
@diagrams.route(f'{uri_prefix}/')
def root():
print (render_template('diagrams/root.html',resources=CDN.render(), prefix=uri_prefix))
return render_template('diagrams/root.html',resources=CDN.render(), prefix=uri_prefix)
@diagrams.route(f'{uri_prefix}/plot')
def plot():
sql= """SELECT * FROM items
LEFT JOIN
(SELECT item_id, college_id, AVG(mlss.age) as avg_age, AVG(mlss.serieux) as avg_serieux, COUNT(id) as nb_item FROM
(SELECT id, item_id, college_id, serieux, DATE_DIFF(CURRENT_DATE(), `date`, DAY) as age from lss) as mlss
GROUP BY item_id, college_id) AS item_synt
ON items.item_id = item_synt.item_id
LEFT JOIN colleges
ON item_synt.college_id = colleges.college_id ;
"""
df = df_from_sql(sql)
df = df.loc[:,~df.columns.duplicated()]
df['size']=df['nb_item']*12
TOOLS="hover,crosshair,pan,wheel_zoom,zoom_in,zoom_out,box_zoom,undo,redo,reset,tap,save,box_select,poly_select,lasso_select,"
TOOLTIPS = [
("item", "@item_name"),
("College","@college_name"),
("Nb essions", "@nb_item"),
]
p = figure(tools=TOOLS, title = "Items", sizing_mode="scale_both", plot_width=100, plot_height=50, tooltips=TOOLTIPS)
p.xaxis.axis_label = 'Serieux'
p.yaxis.axis_label = 'Age'
p.y_range.flipped = True
mapper = factor_cmap(field_name='college_id', factors=df['college_id'].dropna().values, palette=Spectral6)
print(df['college_id'].dropna().unique())
p.circle('avg_serieux', 'avg_age',source=df, color=mapper, size="size", fill_alpha=0.4, legend_field="college_name")
p.legend.location = "top_left"
p.legend.click_policy="hide"
return json.dumps(json_item(p, "myplot"))
@diagrams.route(f'{uri_prefix}/basic.png')
def basic():
plot = figure()
plot.circle([1,2], [3,4])
return file_html(plot, CDN, "my plot")
@diagrams.route(f'{uri_prefix}/plot.png')
def plot_png():
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
matplotlib.use('Agg')
# Fixing random state for reproducibility
np.random.seed(19680801)
mu, sigma = 100, 15
x = mu + sigma * np.random.randn(10000)
# the histogram of the data
n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)
plt.xlabel('Smarts')
plt.ylabel('Probability')
plt.title('Histogram of IQ')
plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
plt.xlim(40, 160)
plt.ylim(0, 0.03)
plt.grid(True)
fig = plt.figure(1)
print(fig)
# draw(ax)
return fig_response(fig)
def fig_response(fig):
"""Turn a matplotlib Figure into Flask response"""
img_bytes = io.BytesIO()
fig.savefig(img_bytes)
img_bytes.seek(0)
return send_file(img_bytes, mimetype='image/png')
|
[
"# modules to create diagrames \nimport flask\nimport io\nimport json\n\nfrom flask import send_file, render_template\nfrom matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\nfrom matplotlib.figure import Figure\n\nimport bokeh\nfrom bokeh.plotting import figure\nfrom bokeh.resources import CDN\nfrom bokeh.embed import file_html\nfrom bokeh.embed import json_item\nfrom bokeh.palettes import Spectral6\nfrom bokeh.transform import factor_cmap\n\nfrom bokeh.sampledata.iris import flowers\nimport pandas as pd\n\nfrom util import df_from_sql\n\nuri_prefix = '/diagrams'\n\ndiagrams = flask.Blueprint('diagrams', __name__)\n\n\n\ncolormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}\ncolors = [colormap[x] for x in flowers['species']]\n\ndef make_plot(x, y):\n p = figure(title = \"Iris Morphology\", sizing_mode=\"fixed\", plot_width=800, plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\[email protected](f'{uri_prefix}/')\ndef root():\n \n print (render_template('diagrams/root.html',resources=CDN.render(), prefix=uri_prefix))\n return render_template('diagrams/root.html',resources=CDN.render(), prefix=uri_prefix)\n\[email protected](f'{uri_prefix}/plot')\ndef plot():\n \n sql= \"\"\"SELECT * FROM items\n LEFT JOIN \n (SELECT item_id, college_id, AVG(mlss.age) as avg_age, AVG(mlss.serieux) as avg_serieux, COUNT(id) as nb_item FROM \n (SELECT id, item_id, college_id, serieux, DATE_DIFF(CURRENT_DATE(), `date`, DAY) as age from lss) as mlss\n GROUP BY item_id, college_id) AS item_synt\n ON items.item_id = item_synt.item_id\n LEFT JOIN colleges\n ON item_synt.college_id = colleges.college_id ;\n \"\"\"\n df = df_from_sql(sql)\n df = df.loc[:,~df.columns.duplicated()]\n df['size']=df['nb_item']*12\n\n TOOLS=\"hover,crosshair,pan,wheel_zoom,zoom_in,zoom_out,box_zoom,undo,redo,reset,tap,save,box_select,poly_select,lasso_select,\"\n TOOLTIPS = [\n (\"item\", \"@item_name\"),\n (\"College\",\"@college_name\"),\n (\"Nb essions\", \"@nb_item\"),\n ]\n p = figure(tools=TOOLS, title = \"Items\", sizing_mode=\"scale_both\", plot_width=100, plot_height=50, tooltips=TOOLTIPS)\n p.xaxis.axis_label = 'Serieux'\n p.yaxis.axis_label = 'Age'\n p.y_range.flipped = True\n\n mapper = factor_cmap(field_name='college_id', factors=df['college_id'].dropna().values, palette=Spectral6)\n print(df['college_id'].dropna().unique())\n p.circle('avg_serieux', 'avg_age',source=df, color=mapper, size=\"size\", fill_alpha=0.4, legend_field=\"college_name\")\n p.legend.location = \"top_left\"\n p.legend.click_policy=\"hide\"\n return json.dumps(json_item(p, \"myplot\"))\n\n\n\n\n\n\n\n\n\n\n\n\[email protected](f'{uri_prefix}/basic.png')\ndef basic():\n plot = figure()\n plot.circle([1,2], [3,4])\n\n return file_html(plot, CDN, \"my plot\")\n\n\[email protected](f'{uri_prefix}/plot.png')\ndef plot_png():\n import numpy as np\n import matplotlib\n import matplotlib.pyplot as plt\n matplotlib.use('Agg')\n\n # Fixing random state for reproducibility\n np.random.seed(19680801)\n\n mu, sigma = 100, 15\n x = mu + sigma * np.random.randn(10000)\n\n # the histogram of the data\n n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)\n\n plt.xlabel('Smarts')\n plt.ylabel('Probability')\n plt.title('Histogram of IQ')\n plt.text(60, .025, r'$\\mu=100,\\ \\sigma=15$')\n plt.xlim(40, 160)\n plt.ylim(0, 0.03)\n plt.grid(True)\n fig = plt.figure(1)\n print(fig)\n # draw(ax)\n return fig_response(fig)\n\ndef fig_response(fig):\n \"\"\"Turn a matplotlib Figure into Flask response\"\"\"\n img_bytes = io.BytesIO()\n fig.savefig(img_bytes)\n img_bytes.seek(0)\n return send_file(img_bytes, mimetype='image/png')",
"import flask\nimport io\nimport json\nfrom flask import send_file, render_template\nfrom matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\nfrom matplotlib.figure import Figure\nimport bokeh\nfrom bokeh.plotting import figure\nfrom bokeh.resources import CDN\nfrom bokeh.embed import file_html\nfrom bokeh.embed import json_item\nfrom bokeh.palettes import Spectral6\nfrom bokeh.transform import factor_cmap\nfrom bokeh.sampledata.iris import flowers\nimport pandas as pd\nfrom util import df_from_sql\nuri_prefix = '/diagrams'\ndiagrams = flask.Blueprint('diagrams', __name__)\ncolormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}\ncolors = [colormap[x] for x in flowers['species']]\n\n\ndef make_plot(x, y):\n p = figure(title='Iris Morphology', sizing_mode='fixed', plot_width=800,\n plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\[email protected](f'{uri_prefix}/plot')\ndef plot():\n sql = \"\"\"SELECT * FROM items\n LEFT JOIN \n (SELECT item_id, college_id, AVG(mlss.age) as avg_age, AVG(mlss.serieux) as avg_serieux, COUNT(id) as nb_item FROM \n (SELECT id, item_id, college_id, serieux, DATE_DIFF(CURRENT_DATE(), `date`, DAY) as age from lss) as mlss\n GROUP BY item_id, college_id) AS item_synt\n ON items.item_id = item_synt.item_id\n LEFT JOIN colleges\n ON item_synt.college_id = colleges.college_id ;\n \"\"\"\n df = df_from_sql(sql)\n df = df.loc[:, ~df.columns.duplicated()]\n df['size'] = df['nb_item'] * 12\n TOOLS = (\n 'hover,crosshair,pan,wheel_zoom,zoom_in,zoom_out,box_zoom,undo,redo,reset,tap,save,box_select,poly_select,lasso_select,'\n )\n TOOLTIPS = [('item', '@item_name'), ('College', '@college_name'), (\n 'Nb essions', '@nb_item')]\n p = figure(tools=TOOLS, title='Items', sizing_mode='scale_both',\n plot_width=100, plot_height=50, tooltips=TOOLTIPS)\n p.xaxis.axis_label = 'Serieux'\n p.yaxis.axis_label = 'Age'\n p.y_range.flipped = True\n mapper = factor_cmap(field_name='college_id', factors=df['college_id'].\n dropna().values, palette=Spectral6)\n print(df['college_id'].dropna().unique())\n p.circle('avg_serieux', 'avg_age', source=df, color=mapper, size='size',\n fill_alpha=0.4, legend_field='college_name')\n p.legend.location = 'top_left'\n p.legend.click_policy = 'hide'\n return json.dumps(json_item(p, 'myplot'))\n\n\[email protected](f'{uri_prefix}/basic.png')\ndef basic():\n plot = figure()\n plot.circle([1, 2], [3, 4])\n return file_html(plot, CDN, 'my plot')\n\n\[email protected](f'{uri_prefix}/plot.png')\ndef plot_png():\n import numpy as np\n import matplotlib\n import matplotlib.pyplot as plt\n matplotlib.use('Agg')\n np.random.seed(19680801)\n mu, sigma = 100, 15\n x = mu + sigma * np.random.randn(10000)\n n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)\n plt.xlabel('Smarts')\n plt.ylabel('Probability')\n plt.title('Histogram of IQ')\n plt.text(60, 0.025, '$\\\\mu=100,\\\\ \\\\sigma=15$')\n plt.xlim(40, 160)\n plt.ylim(0, 0.03)\n plt.grid(True)\n fig = plt.figure(1)\n print(fig)\n return fig_response(fig)\n\n\ndef fig_response(fig):\n \"\"\"Turn a matplotlib Figure into Flask response\"\"\"\n img_bytes = io.BytesIO()\n fig.savefig(img_bytes)\n img_bytes.seek(0)\n return send_file(img_bytes, mimetype='image/png')\n",
"<import token>\nuri_prefix = '/diagrams'\ndiagrams = flask.Blueprint('diagrams', __name__)\ncolormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}\ncolors = [colormap[x] for x in flowers['species']]\n\n\ndef make_plot(x, y):\n p = figure(title='Iris Morphology', sizing_mode='fixed', plot_width=800,\n plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\[email protected](f'{uri_prefix}/plot')\ndef plot():\n sql = \"\"\"SELECT * FROM items\n LEFT JOIN \n (SELECT item_id, college_id, AVG(mlss.age) as avg_age, AVG(mlss.serieux) as avg_serieux, COUNT(id) as nb_item FROM \n (SELECT id, item_id, college_id, serieux, DATE_DIFF(CURRENT_DATE(), `date`, DAY) as age from lss) as mlss\n GROUP BY item_id, college_id) AS item_synt\n ON items.item_id = item_synt.item_id\n LEFT JOIN colleges\n ON item_synt.college_id = colleges.college_id ;\n \"\"\"\n df = df_from_sql(sql)\n df = df.loc[:, ~df.columns.duplicated()]\n df['size'] = df['nb_item'] * 12\n TOOLS = (\n 'hover,crosshair,pan,wheel_zoom,zoom_in,zoom_out,box_zoom,undo,redo,reset,tap,save,box_select,poly_select,lasso_select,'\n )\n TOOLTIPS = [('item', '@item_name'), ('College', '@college_name'), (\n 'Nb essions', '@nb_item')]\n p = figure(tools=TOOLS, title='Items', sizing_mode='scale_both',\n plot_width=100, plot_height=50, tooltips=TOOLTIPS)\n p.xaxis.axis_label = 'Serieux'\n p.yaxis.axis_label = 'Age'\n p.y_range.flipped = True\n mapper = factor_cmap(field_name='college_id', factors=df['college_id'].\n dropna().values, palette=Spectral6)\n print(df['college_id'].dropna().unique())\n p.circle('avg_serieux', 'avg_age', source=df, color=mapper, size='size',\n fill_alpha=0.4, legend_field='college_name')\n p.legend.location = 'top_left'\n p.legend.click_policy = 'hide'\n return json.dumps(json_item(p, 'myplot'))\n\n\[email protected](f'{uri_prefix}/basic.png')\ndef basic():\n plot = figure()\n plot.circle([1, 2], [3, 4])\n return file_html(plot, CDN, 'my plot')\n\n\[email protected](f'{uri_prefix}/plot.png')\ndef plot_png():\n import numpy as np\n import matplotlib\n import matplotlib.pyplot as plt\n matplotlib.use('Agg')\n np.random.seed(19680801)\n mu, sigma = 100, 15\n x = mu + sigma * np.random.randn(10000)\n n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)\n plt.xlabel('Smarts')\n plt.ylabel('Probability')\n plt.title('Histogram of IQ')\n plt.text(60, 0.025, '$\\\\mu=100,\\\\ \\\\sigma=15$')\n plt.xlim(40, 160)\n plt.ylim(0, 0.03)\n plt.grid(True)\n fig = plt.figure(1)\n print(fig)\n return fig_response(fig)\n\n\ndef fig_response(fig):\n \"\"\"Turn a matplotlib Figure into Flask response\"\"\"\n img_bytes = io.BytesIO()\n fig.savefig(img_bytes)\n img_bytes.seek(0)\n return send_file(img_bytes, mimetype='image/png')\n",
"<import token>\n<assignment token>\n\n\ndef make_plot(x, y):\n p = figure(title='Iris Morphology', sizing_mode='fixed', plot_width=800,\n plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\[email protected](f'{uri_prefix}/plot')\ndef plot():\n sql = \"\"\"SELECT * FROM items\n LEFT JOIN \n (SELECT item_id, college_id, AVG(mlss.age) as avg_age, AVG(mlss.serieux) as avg_serieux, COUNT(id) as nb_item FROM \n (SELECT id, item_id, college_id, serieux, DATE_DIFF(CURRENT_DATE(), `date`, DAY) as age from lss) as mlss\n GROUP BY item_id, college_id) AS item_synt\n ON items.item_id = item_synt.item_id\n LEFT JOIN colleges\n ON item_synt.college_id = colleges.college_id ;\n \"\"\"\n df = df_from_sql(sql)\n df = df.loc[:, ~df.columns.duplicated()]\n df['size'] = df['nb_item'] * 12\n TOOLS = (\n 'hover,crosshair,pan,wheel_zoom,zoom_in,zoom_out,box_zoom,undo,redo,reset,tap,save,box_select,poly_select,lasso_select,'\n )\n TOOLTIPS = [('item', '@item_name'), ('College', '@college_name'), (\n 'Nb essions', '@nb_item')]\n p = figure(tools=TOOLS, title='Items', sizing_mode='scale_both',\n plot_width=100, plot_height=50, tooltips=TOOLTIPS)\n p.xaxis.axis_label = 'Serieux'\n p.yaxis.axis_label = 'Age'\n p.y_range.flipped = True\n mapper = factor_cmap(field_name='college_id', factors=df['college_id'].\n dropna().values, palette=Spectral6)\n print(df['college_id'].dropna().unique())\n p.circle('avg_serieux', 'avg_age', source=df, color=mapper, size='size',\n fill_alpha=0.4, legend_field='college_name')\n p.legend.location = 'top_left'\n p.legend.click_policy = 'hide'\n return json.dumps(json_item(p, 'myplot'))\n\n\[email protected](f'{uri_prefix}/basic.png')\ndef basic():\n plot = figure()\n plot.circle([1, 2], [3, 4])\n return file_html(plot, CDN, 'my plot')\n\n\[email protected](f'{uri_prefix}/plot.png')\ndef plot_png():\n import numpy as np\n import matplotlib\n import matplotlib.pyplot as plt\n matplotlib.use('Agg')\n np.random.seed(19680801)\n mu, sigma = 100, 15\n x = mu + sigma * np.random.randn(10000)\n n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)\n plt.xlabel('Smarts')\n plt.ylabel('Probability')\n plt.title('Histogram of IQ')\n plt.text(60, 0.025, '$\\\\mu=100,\\\\ \\\\sigma=15$')\n plt.xlim(40, 160)\n plt.ylim(0, 0.03)\n plt.grid(True)\n fig = plt.figure(1)\n print(fig)\n return fig_response(fig)\n\n\ndef fig_response(fig):\n \"\"\"Turn a matplotlib Figure into Flask response\"\"\"\n img_bytes = io.BytesIO()\n fig.savefig(img_bytes)\n img_bytes.seek(0)\n return send_file(img_bytes, mimetype='image/png')\n",
"<import token>\n<assignment token>\n\n\ndef make_plot(x, y):\n p = figure(title='Iris Morphology', sizing_mode='fixed', plot_width=800,\n plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\[email protected](f'{uri_prefix}/plot')\ndef plot():\n sql = \"\"\"SELECT * FROM items\n LEFT JOIN \n (SELECT item_id, college_id, AVG(mlss.age) as avg_age, AVG(mlss.serieux) as avg_serieux, COUNT(id) as nb_item FROM \n (SELECT id, item_id, college_id, serieux, DATE_DIFF(CURRENT_DATE(), `date`, DAY) as age from lss) as mlss\n GROUP BY item_id, college_id) AS item_synt\n ON items.item_id = item_synt.item_id\n LEFT JOIN colleges\n ON item_synt.college_id = colleges.college_id ;\n \"\"\"\n df = df_from_sql(sql)\n df = df.loc[:, ~df.columns.duplicated()]\n df['size'] = df['nb_item'] * 12\n TOOLS = (\n 'hover,crosshair,pan,wheel_zoom,zoom_in,zoom_out,box_zoom,undo,redo,reset,tap,save,box_select,poly_select,lasso_select,'\n )\n TOOLTIPS = [('item', '@item_name'), ('College', '@college_name'), (\n 'Nb essions', '@nb_item')]\n p = figure(tools=TOOLS, title='Items', sizing_mode='scale_both',\n plot_width=100, plot_height=50, tooltips=TOOLTIPS)\n p.xaxis.axis_label = 'Serieux'\n p.yaxis.axis_label = 'Age'\n p.y_range.flipped = True\n mapper = factor_cmap(field_name='college_id', factors=df['college_id'].\n dropna().values, palette=Spectral6)\n print(df['college_id'].dropna().unique())\n p.circle('avg_serieux', 'avg_age', source=df, color=mapper, size='size',\n fill_alpha=0.4, legend_field='college_name')\n p.legend.location = 'top_left'\n p.legend.click_policy = 'hide'\n return json.dumps(json_item(p, 'myplot'))\n\n\n<function token>\n\n\[email protected](f'{uri_prefix}/plot.png')\ndef plot_png():\n import numpy as np\n import matplotlib\n import matplotlib.pyplot as plt\n matplotlib.use('Agg')\n np.random.seed(19680801)\n mu, sigma = 100, 15\n x = mu + sigma * np.random.randn(10000)\n n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)\n plt.xlabel('Smarts')\n plt.ylabel('Probability')\n plt.title('Histogram of IQ')\n plt.text(60, 0.025, '$\\\\mu=100,\\\\ \\\\sigma=15$')\n plt.xlim(40, 160)\n plt.ylim(0, 0.03)\n plt.grid(True)\n fig = plt.figure(1)\n print(fig)\n return fig_response(fig)\n\n\ndef fig_response(fig):\n \"\"\"Turn a matplotlib Figure into Flask response\"\"\"\n img_bytes = io.BytesIO()\n fig.savefig(img_bytes)\n img_bytes.seek(0)\n return send_file(img_bytes, mimetype='image/png')\n",
"<import token>\n<assignment token>\n\n\ndef make_plot(x, y):\n p = figure(title='Iris Morphology', sizing_mode='fixed', plot_width=800,\n plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\[email protected](f'{uri_prefix}/plot')\ndef plot():\n sql = \"\"\"SELECT * FROM items\n LEFT JOIN \n (SELECT item_id, college_id, AVG(mlss.age) as avg_age, AVG(mlss.serieux) as avg_serieux, COUNT(id) as nb_item FROM \n (SELECT id, item_id, college_id, serieux, DATE_DIFF(CURRENT_DATE(), `date`, DAY) as age from lss) as mlss\n GROUP BY item_id, college_id) AS item_synt\n ON items.item_id = item_synt.item_id\n LEFT JOIN colleges\n ON item_synt.college_id = colleges.college_id ;\n \"\"\"\n df = df_from_sql(sql)\n df = df.loc[:, ~df.columns.duplicated()]\n df['size'] = df['nb_item'] * 12\n TOOLS = (\n 'hover,crosshair,pan,wheel_zoom,zoom_in,zoom_out,box_zoom,undo,redo,reset,tap,save,box_select,poly_select,lasso_select,'\n )\n TOOLTIPS = [('item', '@item_name'), ('College', '@college_name'), (\n 'Nb essions', '@nb_item')]\n p = figure(tools=TOOLS, title='Items', sizing_mode='scale_both',\n plot_width=100, plot_height=50, tooltips=TOOLTIPS)\n p.xaxis.axis_label = 'Serieux'\n p.yaxis.axis_label = 'Age'\n p.y_range.flipped = True\n mapper = factor_cmap(field_name='college_id', factors=df['college_id'].\n dropna().values, palette=Spectral6)\n print(df['college_id'].dropna().unique())\n p.circle('avg_serieux', 'avg_age', source=df, color=mapper, size='size',\n fill_alpha=0.4, legend_field='college_name')\n p.legend.location = 'top_left'\n p.legend.click_policy = 'hide'\n return json.dumps(json_item(p, 'myplot'))\n\n\n<function token>\n\n\[email protected](f'{uri_prefix}/plot.png')\ndef plot_png():\n import numpy as np\n import matplotlib\n import matplotlib.pyplot as plt\n matplotlib.use('Agg')\n np.random.seed(19680801)\n mu, sigma = 100, 15\n x = mu + sigma * np.random.randn(10000)\n n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)\n plt.xlabel('Smarts')\n plt.ylabel('Probability')\n plt.title('Histogram of IQ')\n plt.text(60, 0.025, '$\\\\mu=100,\\\\ \\\\sigma=15$')\n plt.xlim(40, 160)\n plt.ylim(0, 0.03)\n plt.grid(True)\n fig = plt.figure(1)\n print(fig)\n return fig_response(fig)\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\ndef make_plot(x, y):\n p = figure(title='Iris Morphology', sizing_mode='fixed', plot_width=800,\n plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\n<function token>\n<function token>\n\n\[email protected](f'{uri_prefix}/plot.png')\ndef plot_png():\n import numpy as np\n import matplotlib\n import matplotlib.pyplot as plt\n matplotlib.use('Agg')\n np.random.seed(19680801)\n mu, sigma = 100, 15\n x = mu + sigma * np.random.randn(10000)\n n, bins, patches = plt.hist(x, 50, density=True, facecolor='g', alpha=0.75)\n plt.xlabel('Smarts')\n plt.ylabel('Probability')\n plt.title('Histogram of IQ')\n plt.text(60, 0.025, '$\\\\mu=100,\\\\ \\\\sigma=15$')\n plt.xlim(40, 160)\n plt.ylim(0, 0.03)\n plt.grid(True)\n fig = plt.figure(1)\n print(fig)\n return fig_response(fig)\n\n\n<function token>\n",
"<import token>\n<assignment token>\n\n\ndef make_plot(x, y):\n p = figure(title='Iris Morphology', sizing_mode='fixed', plot_width=800,\n plot_height=400)\n p.xaxis.axis_label = x\n p.yaxis.axis_label = y\n p.circle(flowers[x], flowers[y], color=colors, fill_alpha=0.2, size=10)\n return p\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\[email protected](f'{uri_prefix}/')\ndef root():\n print(render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix))\n return render_template('diagrams/root.html', resources=CDN.render(),\n prefix=uri_prefix)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
99,001 |
dc764eef16c1554b9e87936fad35078c769b4fcd
|
from flask import Flask, request, render_template, redirect, Markup
from flask_restful import Resource, Api
from flask_cors import CORS, cross_origin
# from flaskext.markdown import Markdown
from sqlalchemy import create_engine
import MySQLdb
from json import dumps
from flask_jsonpify import jsonify
from collections import OrderedDict
import os
import json
import random
# import markdown
# API Formatter classes
from api_classes import Helper, GameInstance, PlayerInstance, TeamInstance, TourneyInstance
# Queries
from api_queries import game_query, player_query, team_query, tourney_query
from search_queries import search_game, search_player, search_team, search_tourney
'=====================START CONFIGURATION====================='
engine = create_engine(
'mysql://{0}:{1}@{2}:3306/{3}?charset=utf8'.format(
os.environ['DB_USER'],
os.environ['DB_PASS'],
os.environ['DB_HOST'],
os.environ['DB_NAME']))
app = Flask(__name__, static_url_path='/static')
# Markdown(app)
app.config["JSON_SORT_KEYS"] = False
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Access-Control-Allow-Origin'
api = Api(app)
'=====================END CONFIGURATION====================='
'=====================START UI ROUTING====================='
@app.route('/')
@cross_origin()
def home():
return render_template('api.html')
@app.errorhandler(404)
def page_not_found(e):
return "What are you looking for m8?"
'=====================END UI ROUTING====================='
'=====================START API====================='
class Players(Resource):
def get(self):
conn = engine.connect()
query = conn.execute(player_query())
list_players = []
for row in query:
player = PlayerInstance(row).get_dict()
list_players.append(player)
conn.close()
return jsonify(list_players)
class Player(Resource):
def get(self, player_id):
conn = engine.connect()
query = conn.execute(player_query(player_id))
row = query.fetchone()
player = PlayerInstance(row).get_dict()
conn.close()
return jsonify(player)
class Teams(Resource):
def get(self):
conn = engine.connect()
conn.execute("set @@session.group_concat_max_len=4294967295")
query = conn.execute(team_query())
list_teams = []
for row in query:
team = TeamInstance(row).get_dict()
list_teams.append(team)
conn.close()
return jsonify(list_teams)
class Team(Resource):
def get(self, team_id):
conn = engine.connect()
conn.execute("set @@session.group_concat_max_len=4294967295")
query = conn.execute(team_query(team_id))
row = query.fetchone()
team = TeamInstance(row).get_dict()
conn.close()
return jsonify(team)
class Tourneys(Resource):
def get(self):
conn = engine.connect()
query = conn.execute(tourney_query())
list_tourneys = []
for row in query:
tourney = TourneyInstance(row).get_dict()
list_tourneys.append(tourney)
conn.close()
return jsonify(list_tourneys)
class Tourney(Resource):
def get(self, tourney_id):
conn = engine.connect()
_ = conn.execute("set @@session.group_concat_max_len=18446744073709551615")
query = conn.execute(tourney_query(tourney_id))
row = query.fetchone()
tourney = TourneyInstance(row).get_dict()
conn.close()
return jsonify(tourney)
class Games(Resource):
def get(self):
conn = engine.connect()
_ = conn.execute("set @@session.group_concat_max_len=18446744073709551615")
query = conn.execute(game_query())
list_games = []
for row in query:
list_games.append(GameInstance(row).get_dict())
conn.close()
return jsonify(list_games)
class Game(Resource):
def get(self, game_id):
conn = engine.connect()
_ = conn.execute("set @@session.group_concat_max_len=18446744073709551615")
query = conn.execute(game_query(game_id))
row = query.fetchone()
conn.close()
return jsonify(GameInstance(row).get_dict())
class Search(Resource):
def get(self, search_str):
search_str = Helper.form_regex(search_str)
if search_str is None:
return jsonify(["Please enter at least one keyword."])
search_results = OrderedDict()
conn = engine.connect()
_ = conn.execute("set @@session.group_concat_max_len=18446744073709551615")
game_results = conn.execute(search_game(search_str))
player_results = conn.execute(search_player(search_str))
team_results = conn.execute(search_team(search_str))
tourney_results = conn.execute(search_tourney(search_str))
game_data = []
player_data = []
team_data = []
tourney_data = []
game_formatter = GameInstance()
player_formatter = PlayerInstance()
team_formatter = TeamInstance()
tourney_formatter = TourneyInstance()
for row in game_results:
game_data.append(game_formatter.get_dict(search=True, input_row=row))
for row in player_results:
player_data.append(player_formatter.get_dict(search=True, input_row=row))
for row in team_results:
team_data.append(team_formatter.get_dict(search=True, input_row=row))
for row in tourney_results:
tourney_data.append(tourney_formatter.get_dict(search=True, input_row=row))
search_results['games'] = game_data
search_results['players'] = player_data
search_results['teams'] = team_data
search_results['tournaments'] = tourney_data
return jsonify(search_results)
api.add_resource(Players, '/players', '/players/')
api.add_resource(Player, '/players/<player_id>')
api.add_resource(Teams, '/teams', '/teams/')
api.add_resource(Team, '/teams/<team_id>')
api.add_resource(Tourneys, '/tournaments', '/tournaments/')
api.add_resource(Tourney, '/tournaments/<tourney_id>')
api.add_resource(Games, '/games', '/games/')
api.add_resource(Game, '/games/<game_id>')
api.add_resource(Search, '/search/<search_str>')
'=====================END API====================='
if __name__ == '__main__':
app.run()
|
[
"from flask import Flask, request, render_template, redirect, Markup\nfrom flask_restful import Resource, Api\nfrom flask_cors import CORS, cross_origin\n# from flaskext.markdown import Markdown\nfrom sqlalchemy import create_engine\nimport MySQLdb\nfrom json import dumps\nfrom flask_jsonpify import jsonify\nfrom collections import OrderedDict\nimport os\nimport json\nimport random\n# import markdown\n\n# API Formatter classes\nfrom api_classes import Helper, GameInstance, PlayerInstance, TeamInstance, TourneyInstance\n\n# Queries\nfrom api_queries import game_query, player_query, team_query, tourney_query\nfrom search_queries import search_game, search_player, search_team, search_tourney\n\n\n'=====================START CONFIGURATION====================='\n\nengine = create_engine(\n 'mysql://{0}:{1}@{2}:3306/{3}?charset=utf8'.format(\n os.environ['DB_USER'],\n os.environ['DB_PASS'],\n os.environ['DB_HOST'],\n os.environ['DB_NAME']))\n\napp = Flask(__name__, static_url_path='/static')\n# Markdown(app)\napp.config[\"JSON_SORT_KEYS\"] = False\ncors = CORS(app)\napp.config['CORS_HEADERS'] = 'Access-Control-Allow-Origin'\napi = Api(app)\n\n'=====================END CONFIGURATION====================='\n'=====================START UI ROUTING====================='\n\n\[email protected]('/')\n@cross_origin()\ndef home():\n return render_template('api.html')\n\n\[email protected](404)\ndef page_not_found(e):\n return \"What are you looking for m8?\"\n\n\n'=====================END UI ROUTING====================='\n'=====================START API====================='\n\n\nclass Players(Resource):\n def get(self):\n conn = engine.connect()\n query = conn.execute(player_query())\n list_players = []\n for row in query:\n player = PlayerInstance(row).get_dict()\n list_players.append(player)\n conn.close()\n return jsonify(list_players)\n\n\nclass Player(Resource):\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n def get(self):\n conn = engine.connect()\n conn.execute(\"set @@session.group_concat_max_len=4294967295\")\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n def get(self, team_id):\n conn = engine.connect()\n conn.execute(\"set @@session.group_concat_max_len=4294967295\")\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\"set @@session.group_concat_max_len=18446744073709551615\")\n\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\"set @@session.group_concat_max_len=18446744073709551615\")\n\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\"set @@session.group_concat_max_len=18446744073709551615\")\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify([\"Please enter at least one keyword.\"])\n\n search_results = OrderedDict()\n conn = engine.connect()\n\n _ = conn.execute(\"set @@session.group_concat_max_len=18446744073709551615\")\n\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row=row))\n\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True, input_row=row))\n\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row=row))\n\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True, input_row=row))\n\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n\n return jsonify(search_results)\n\napi.add_resource(Players, '/players', '/players/')\napi.add_resource(Player, '/players/<player_id>')\n\napi.add_resource(Teams, '/teams', '/teams/')\napi.add_resource(Team, '/teams/<team_id>')\n\napi.add_resource(Tourneys, '/tournaments', '/tournaments/')\napi.add_resource(Tourney, '/tournaments/<tourney_id>')\n\napi.add_resource(Games, '/games', '/games/')\napi.add_resource(Game, '/games/<game_id>')\n\napi.add_resource(Search, '/search/<search_str>')\n\n'=====================END API====================='\n\nif __name__ == '__main__':\n app.run()\n",
"from flask import Flask, request, render_template, redirect, Markup\nfrom flask_restful import Resource, Api\nfrom flask_cors import CORS, cross_origin\nfrom sqlalchemy import create_engine\nimport MySQLdb\nfrom json import dumps\nfrom flask_jsonpify import jsonify\nfrom collections import OrderedDict\nimport os\nimport json\nimport random\nfrom api_classes import Helper, GameInstance, PlayerInstance, TeamInstance, TourneyInstance\nfrom api_queries import game_query, player_query, team_query, tourney_query\nfrom search_queries import search_game, search_player, search_team, search_tourney\n<docstring token>\nengine = create_engine('mysql://{0}:{1}@{2}:3306/{3}?charset=utf8'.format(\n os.environ['DB_USER'], os.environ['DB_PASS'], os.environ['DB_HOST'], os\n .environ['DB_NAME']))\napp = Flask(__name__, static_url_path='/static')\napp.config['JSON_SORT_KEYS'] = False\ncors = CORS(app)\napp.config['CORS_HEADERS'] = 'Access-Control-Allow-Origin'\napi = Api(app)\n<docstring token>\n\n\[email protected]('/')\n@cross_origin()\ndef home():\n return render_template('api.html')\n\n\[email protected](404)\ndef page_not_found(e):\n return 'What are you looking for m8?'\n\n\n<docstring token>\n\n\nclass Players(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(player_query())\n list_players = []\n for row in query:\n player = PlayerInstance(row).get_dict()\n list_players.append(player)\n conn.close()\n return jsonify(list_players)\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\napi.add_resource(Players, '/players', '/players/')\napi.add_resource(Player, '/players/<player_id>')\napi.add_resource(Teams, '/teams', '/teams/')\napi.add_resource(Team, '/teams/<team_id>')\napi.add_resource(Tourneys, '/tournaments', '/tournaments/')\napi.add_resource(Tourney, '/tournaments/<tourney_id>')\napi.add_resource(Games, '/games', '/games/')\napi.add_resource(Game, '/games/<game_id>')\napi.add_resource(Search, '/search/<search_str>')\n<docstring token>\nif __name__ == '__main__':\n app.run()\n",
"<import token>\n<docstring token>\nengine = create_engine('mysql://{0}:{1}@{2}:3306/{3}?charset=utf8'.format(\n os.environ['DB_USER'], os.environ['DB_PASS'], os.environ['DB_HOST'], os\n .environ['DB_NAME']))\napp = Flask(__name__, static_url_path='/static')\napp.config['JSON_SORT_KEYS'] = False\ncors = CORS(app)\napp.config['CORS_HEADERS'] = 'Access-Control-Allow-Origin'\napi = Api(app)\n<docstring token>\n\n\[email protected]('/')\n@cross_origin()\ndef home():\n return render_template('api.html')\n\n\[email protected](404)\ndef page_not_found(e):\n return 'What are you looking for m8?'\n\n\n<docstring token>\n\n\nclass Players(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(player_query())\n list_players = []\n for row in query:\n player = PlayerInstance(row).get_dict()\n list_players.append(player)\n conn.close()\n return jsonify(list_players)\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\napi.add_resource(Players, '/players', '/players/')\napi.add_resource(Player, '/players/<player_id>')\napi.add_resource(Teams, '/teams', '/teams/')\napi.add_resource(Team, '/teams/<team_id>')\napi.add_resource(Tourneys, '/tournaments', '/tournaments/')\napi.add_resource(Tourney, '/tournaments/<tourney_id>')\napi.add_resource(Games, '/games', '/games/')\napi.add_resource(Game, '/games/<game_id>')\napi.add_resource(Search, '/search/<search_str>')\n<docstring token>\nif __name__ == '__main__':\n app.run()\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n\n\[email protected]('/')\n@cross_origin()\ndef home():\n return render_template('api.html')\n\n\[email protected](404)\ndef page_not_found(e):\n return 'What are you looking for m8?'\n\n\n<docstring token>\n\n\nclass Players(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(player_query())\n list_players = []\n for row in query:\n player = PlayerInstance(row).get_dict()\n list_players.append(player)\n conn.close()\n return jsonify(list_players)\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\napi.add_resource(Players, '/players', '/players/')\napi.add_resource(Player, '/players/<player_id>')\napi.add_resource(Teams, '/teams', '/teams/')\napi.add_resource(Team, '/teams/<team_id>')\napi.add_resource(Tourneys, '/tournaments', '/tournaments/')\napi.add_resource(Tourney, '/tournaments/<tourney_id>')\napi.add_resource(Games, '/games', '/games/')\napi.add_resource(Game, '/games/<game_id>')\napi.add_resource(Search, '/search/<search_str>')\n<docstring token>\nif __name__ == '__main__':\n app.run()\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n\n\[email protected]('/')\n@cross_origin()\ndef home():\n return render_template('api.html')\n\n\[email protected](404)\ndef page_not_found(e):\n return 'What are you looking for m8?'\n\n\n<docstring token>\n\n\nclass Players(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(player_query())\n list_players = []\n for row in query:\n player = PlayerInstance(row).get_dict()\n list_players.append(player)\n conn.close()\n return jsonify(list_players)\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n\n\[email protected](404)\ndef page_not_found(e):\n return 'What are you looking for m8?'\n\n\n<docstring token>\n\n\nclass Players(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(player_query())\n list_players = []\n for row in query:\n player = PlayerInstance(row).get_dict()\n list_players.append(player)\n conn.close()\n return jsonify(list_players)\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n\n\nclass Players(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(player_query())\n list_players = []\n for row in query:\n player = PlayerInstance(row).get_dict()\n list_players.append(player)\n conn.close()\n return jsonify(list_players)\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n\n\nclass Players(Resource):\n <function token>\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n\n\nclass Player(Resource):\n\n def get(self, player_id):\n conn = engine.connect()\n query = conn.execute(player_query(player_id))\n row = query.fetchone()\n player = PlayerInstance(row).get_dict()\n conn.close()\n return jsonify(player)\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n\n\nclass Player(Resource):\n <function token>\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n\n\nclass Teams(Resource):\n\n def get(self):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query())\n list_teams = []\n for row in query:\n team = TeamInstance(row).get_dict()\n list_teams.append(team)\n conn.close()\n return jsonify(list_teams)\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n\n\nclass Teams(Resource):\n <function token>\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n\n\nclass Team(Resource):\n\n def get(self, team_id):\n conn = engine.connect()\n conn.execute('set @@session.group_concat_max_len=4294967295')\n query = conn.execute(team_query(team_id))\n row = query.fetchone()\n team = TeamInstance(row).get_dict()\n conn.close()\n return jsonify(team)\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n\n\nclass Team(Resource):\n <function token>\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Tourneys(Resource):\n\n def get(self):\n conn = engine.connect()\n query = conn.execute(tourney_query())\n list_tourneys = []\n for row in query:\n tourney = TourneyInstance(row).get_dict()\n list_tourneys.append(tourney)\n conn.close()\n return jsonify(list_tourneys)\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Tourneys(Resource):\n <function token>\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Tourney(Resource):\n\n def get(self, tourney_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(tourney_query(tourney_id))\n row = query.fetchone()\n tourney = TourneyInstance(row).get_dict()\n conn.close()\n return jsonify(tourney)\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Tourney(Resource):\n <function token>\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Games(Resource):\n\n def get(self):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query())\n list_games = []\n for row in query:\n list_games.append(GameInstance(row).get_dict())\n conn.close()\n return jsonify(list_games)\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Games(Resource):\n <function token>\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Game(Resource):\n\n def get(self, game_id):\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n query = conn.execute(game_query(game_id))\n row = query.fetchone()\n conn.close()\n return jsonify(GameInstance(row).get_dict())\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Game(Resource):\n <function token>\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Search(Resource):\n\n def get(self, search_str):\n search_str = Helper.form_regex(search_str)\n if search_str is None:\n return jsonify(['Please enter at least one keyword.'])\n search_results = OrderedDict()\n conn = engine.connect()\n _ = conn.execute(\n 'set @@session.group_concat_max_len=18446744073709551615')\n game_results = conn.execute(search_game(search_str))\n player_results = conn.execute(search_player(search_str))\n team_results = conn.execute(search_team(search_str))\n tourney_results = conn.execute(search_tourney(search_str))\n game_data = []\n player_data = []\n team_data = []\n tourney_data = []\n game_formatter = GameInstance()\n player_formatter = PlayerInstance()\n team_formatter = TeamInstance()\n tourney_formatter = TourneyInstance()\n for row in game_results:\n game_data.append(game_formatter.get_dict(search=True, input_row\n =row))\n for row in player_results:\n player_data.append(player_formatter.get_dict(search=True,\n input_row=row))\n for row in team_results:\n team_data.append(team_formatter.get_dict(search=True, input_row\n =row))\n for row in tourney_results:\n tourney_data.append(tourney_formatter.get_dict(search=True,\n input_row=row))\n search_results['games'] = game_data\n search_results['players'] = player_data\n search_results['teams'] = team_data\n search_results['tournaments'] = tourney_data\n return jsonify(search_results)\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Search(Resource):\n <function token>\n\n\n<code token>\n<docstring token>\n<code token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<docstring token>\n<function token>\n<function token>\n<docstring token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<code token>\n<docstring token>\n<code token>\n"
] | false |
99,002 |
ebe15d833ce4f531816f4b3e4af94002076a6280
|
"""
Code to learn things about remote workers, like MAC address,
NTP timing, etc.
"""
import numpy as np
import time
import re
import subprocess
import ntplib
NTP_SERVERS = ['time.mit.edu',
'ntp1.net.berkeley.edu',
'ntp2.net.berkeley.edu']
def get_time_offset(server, attempts=1):
"""
Returns a list of offsets for a particular server
"""
import ntplib
c = ntplib.NTPClient()
res = []
for i in range(attempts):
try:
r = c.request(server, version=3)
offset = r.offset
delay = r.delay
res.append(offset)
except ntplib.NTPException:
pass
return res
def parse_ifconfig_hwaddr(s):
a = re.search(r'.+?(HWaddr\s+(?P<hardware_address>\S+))', s, re.MULTILINE).groupdict('')
return a['hardware_address']
def parse_ifconfig_inetaddr(s):
return re.findall(r'.+?inet addr:(?P<inet_addr>[\d.]+)', s, re.MULTILINE)
def get_hwaddr():
ifconfig_data = subprocess.check_output("/sbin/ifconfig")
hwaddr = parse_ifconfig_hwaddr(ifconfig_data)
return hwaddr
def get_ifconfig():
ifconfig_data = subprocess.check_output("/sbin/ifconfig")
hwaddr = parse_ifconfig_hwaddr(ifconfig_data)
inet_addr = parse_ifconfig_inetaddr(ifconfig_data)
return hwaddr, inet_addr
def get_uptime():
uptime_str = open("/proc/uptime").read().strip()
up_str, idle_str = uptime_str.split()
return float(up_str), float(idle_str)
|
[
"\"\"\"\nCode to learn things about remote workers, like MAC address, \nNTP timing, etc. \n\"\"\"\n\nimport numpy as np\nimport time\nimport re\nimport subprocess\nimport ntplib\n\nNTP_SERVERS = ['time.mit.edu', \n 'ntp1.net.berkeley.edu', \n 'ntp2.net.berkeley.edu']\n\ndef get_time_offset(server, attempts=1):\n \"\"\"\n Returns a list of offsets for a particular server\n \"\"\"\n import ntplib\n\n c = ntplib.NTPClient()\n\n res = []\n for i in range(attempts):\n try:\n r = c.request(server, version=3)\n offset = r.offset\n delay = r.delay\n res.append(offset)\n except ntplib.NTPException:\n pass\n return res\n\ndef parse_ifconfig_hwaddr(s):\n\n a = re.search(r'.+?(HWaddr\\s+(?P<hardware_address>\\S+))', s, re.MULTILINE).groupdict('')\n return a['hardware_address']\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall(r'.+?inet addr:(?P<inet_addr>[\\d.]+)', s, re.MULTILINE)\n\ndef get_hwaddr():\n ifconfig_data = subprocess.check_output(\"/sbin/ifconfig\")\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n return hwaddr\n\ndef get_ifconfig():\n ifconfig_data = subprocess.check_output(\"/sbin/ifconfig\")\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n inet_addr = parse_ifconfig_inetaddr(ifconfig_data)\n return hwaddr, inet_addr\n\ndef get_uptime():\n uptime_str = open(\"/proc/uptime\").read().strip()\n up_str, idle_str = uptime_str.split()\n\n return float(up_str), float(idle_str)\n\n",
"<docstring token>\nimport numpy as np\nimport time\nimport re\nimport subprocess\nimport ntplib\nNTP_SERVERS = ['time.mit.edu', 'ntp1.net.berkeley.edu', 'ntp2.net.berkeley.edu'\n ]\n\n\ndef get_time_offset(server, attempts=1):\n \"\"\"\n Returns a list of offsets for a particular server\n \"\"\"\n import ntplib\n c = ntplib.NTPClient()\n res = []\n for i in range(attempts):\n try:\n r = c.request(server, version=3)\n offset = r.offset\n delay = r.delay\n res.append(offset)\n except ntplib.NTPException:\n pass\n return res\n\n\ndef parse_ifconfig_hwaddr(s):\n a = re.search('.+?(HWaddr\\\\s+(?P<hardware_address>\\\\S+))', s, re.MULTILINE\n ).groupdict('')\n return a['hardware_address']\n\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall('.+?inet addr:(?P<inet_addr>[\\\\d.]+)', s, re.MULTILINE)\n\n\ndef get_hwaddr():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n return hwaddr\n\n\ndef get_ifconfig():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n inet_addr = parse_ifconfig_inetaddr(ifconfig_data)\n return hwaddr, inet_addr\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\nNTP_SERVERS = ['time.mit.edu', 'ntp1.net.berkeley.edu', 'ntp2.net.berkeley.edu'\n ]\n\n\ndef get_time_offset(server, attempts=1):\n \"\"\"\n Returns a list of offsets for a particular server\n \"\"\"\n import ntplib\n c = ntplib.NTPClient()\n res = []\n for i in range(attempts):\n try:\n r = c.request(server, version=3)\n offset = r.offset\n delay = r.delay\n res.append(offset)\n except ntplib.NTPException:\n pass\n return res\n\n\ndef parse_ifconfig_hwaddr(s):\n a = re.search('.+?(HWaddr\\\\s+(?P<hardware_address>\\\\S+))', s, re.MULTILINE\n ).groupdict('')\n return a['hardware_address']\n\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall('.+?inet addr:(?P<inet_addr>[\\\\d.]+)', s, re.MULTILINE)\n\n\ndef get_hwaddr():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n return hwaddr\n\n\ndef get_ifconfig():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n inet_addr = parse_ifconfig_inetaddr(ifconfig_data)\n return hwaddr, inet_addr\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef get_time_offset(server, attempts=1):\n \"\"\"\n Returns a list of offsets for a particular server\n \"\"\"\n import ntplib\n c = ntplib.NTPClient()\n res = []\n for i in range(attempts):\n try:\n r = c.request(server, version=3)\n offset = r.offset\n delay = r.delay\n res.append(offset)\n except ntplib.NTPException:\n pass\n return res\n\n\ndef parse_ifconfig_hwaddr(s):\n a = re.search('.+?(HWaddr\\\\s+(?P<hardware_address>\\\\S+))', s, re.MULTILINE\n ).groupdict('')\n return a['hardware_address']\n\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall('.+?inet addr:(?P<inet_addr>[\\\\d.]+)', s, re.MULTILINE)\n\n\ndef get_hwaddr():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n return hwaddr\n\n\ndef get_ifconfig():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n inet_addr = parse_ifconfig_inetaddr(ifconfig_data)\n return hwaddr, inet_addr\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\ndef get_time_offset(server, attempts=1):\n \"\"\"\n Returns a list of offsets for a particular server\n \"\"\"\n import ntplib\n c = ntplib.NTPClient()\n res = []\n for i in range(attempts):\n try:\n r = c.request(server, version=3)\n offset = r.offset\n delay = r.delay\n res.append(offset)\n except ntplib.NTPException:\n pass\n return res\n\n\ndef parse_ifconfig_hwaddr(s):\n a = re.search('.+?(HWaddr\\\\s+(?P<hardware_address>\\\\S+))', s, re.MULTILINE\n ).groupdict('')\n return a['hardware_address']\n\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall('.+?inet addr:(?P<inet_addr>[\\\\d.]+)', s, re.MULTILINE)\n\n\n<function token>\n\n\ndef get_ifconfig():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n inet_addr = parse_ifconfig_inetaddr(ifconfig_data)\n return hwaddr, inet_addr\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n\n\ndef parse_ifconfig_hwaddr(s):\n a = re.search('.+?(HWaddr\\\\s+(?P<hardware_address>\\\\S+))', s, re.MULTILINE\n ).groupdict('')\n return a['hardware_address']\n\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall('.+?inet addr:(?P<inet_addr>[\\\\d.]+)', s, re.MULTILINE)\n\n\n<function token>\n\n\ndef get_ifconfig():\n ifconfig_data = subprocess.check_output('/sbin/ifconfig')\n hwaddr = parse_ifconfig_hwaddr(ifconfig_data)\n inet_addr = parse_ifconfig_inetaddr(ifconfig_data)\n return hwaddr, inet_addr\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n\n\ndef parse_ifconfig_hwaddr(s):\n a = re.search('.+?(HWaddr\\\\s+(?P<hardware_address>\\\\S+))', s, re.MULTILINE\n ).groupdict('')\n return a['hardware_address']\n\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall('.+?inet addr:(?P<inet_addr>[\\\\d.]+)', s, re.MULTILINE)\n\n\n<function token>\n<function token>\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef parse_ifconfig_inetaddr(s):\n return re.findall('.+?inet addr:(?P<inet_addr>[\\\\d.]+)', s, re.MULTILINE)\n\n\n<function token>\n<function token>\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef get_uptime():\n uptime_str = open('/proc/uptime').read().strip()\n up_str, idle_str = uptime_str.split()\n return float(up_str), float(idle_str)\n",
"<docstring token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
99,003 |
004a6b0f58accb55c71cf8cce19a386fe9a04973
|
# Generated by Django 2.1.5 on 2019-01-24 14:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0002_userprofile_nick_name'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='user_type',
field=models.CharField(choices=[('s_group_leader', '小组长'), ('b_group_leader', '大组长'), ('administrator', '管理员')], default='', max_length=50, verbose_name='用户类型'),
),
]
|
[
"# Generated by Django 2.1.5 on 2019-01-24 14:16\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('users', '0002_userprofile_nick_name'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='userprofile',\n name='user_type',\n field=models.CharField(choices=[('s_group_leader', '小组长'), ('b_group_leader', '大组长'), ('administrator', '管理员')], default='', max_length=50, verbose_name='用户类型'),\n ),\n ]\n",
"from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('users', '0002_userprofile_nick_name')]\n operations = [migrations.AlterField(model_name='userprofile', name=\n 'user_type', field=models.CharField(choices=[('s_group_leader',\n '小组长'), ('b_group_leader', '大组长'), ('administrator', '管理员')],\n default='', max_length=50, verbose_name='用户类型'))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('users', '0002_userprofile_nick_name')]\n operations = [migrations.AlterField(model_name='userprofile', name=\n 'user_type', field=models.CharField(choices=[('s_group_leader',\n '小组长'), ('b_group_leader', '大组长'), ('administrator', '管理员')],\n default='', max_length=50, verbose_name='用户类型'))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
99,004 |
6f99029b03ae29625a3e6eb407519e8af575c58c
|
import numpy as np
from PIL import Image
pix = np.asarray(Image.open('imgglauber/Mamografia_01.jpg'))
edited = pix
edited.setflags(write=1)
mini = np.amin(pix)
maxi = np.amax(pix) - 40
A = 255.0 / (maxi - mini)
B = A * mini
edited = np.clip(A * pix - B, 0, 255)
im = Image.fromarray(np.uint8(edited))
im.save('/home/puf3zin/FURG/graficos/imgglauber/Mamografia_02.jpg')
|
[
"import numpy as np\nfrom PIL import Image\n\n\npix = np.asarray(Image.open('imgglauber/Mamografia_01.jpg'))\n\nedited = pix\nedited.setflags(write=1)\n\nmini = np.amin(pix)\nmaxi = np.amax(pix) - 40\n\nA = 255.0 / (maxi - mini)\nB = A * mini\n\n\nedited = np.clip(A * pix - B, 0, 255)\n\nim = Image.fromarray(np.uint8(edited))\nim.save('/home/puf3zin/FURG/graficos/imgglauber/Mamografia_02.jpg')\n",
"import numpy as np\nfrom PIL import Image\npix = np.asarray(Image.open('imgglauber/Mamografia_01.jpg'))\nedited = pix\nedited.setflags(write=1)\nmini = np.amin(pix)\nmaxi = np.amax(pix) - 40\nA = 255.0 / (maxi - mini)\nB = A * mini\nedited = np.clip(A * pix - B, 0, 255)\nim = Image.fromarray(np.uint8(edited))\nim.save('/home/puf3zin/FURG/graficos/imgglauber/Mamografia_02.jpg')\n",
"<import token>\npix = np.asarray(Image.open('imgglauber/Mamografia_01.jpg'))\nedited = pix\nedited.setflags(write=1)\nmini = np.amin(pix)\nmaxi = np.amax(pix) - 40\nA = 255.0 / (maxi - mini)\nB = A * mini\nedited = np.clip(A * pix - B, 0, 255)\nim = Image.fromarray(np.uint8(edited))\nim.save('/home/puf3zin/FURG/graficos/imgglauber/Mamografia_02.jpg')\n",
"<import token>\n<assignment token>\nedited.setflags(write=1)\n<assignment token>\nim.save('/home/puf3zin/FURG/graficos/imgglauber/Mamografia_02.jpg')\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,005 |
4a6820c6d8338fbd76eb4534d40bf83d71d68882
|
from aiohttp import ClientSession
from pycld2 import detect
from re import compile
from .models import PageRaw
class ArticleExtractor(object):
async def extract_article(self, content: str, url: str) -> dict:
"""Returns the article section of the given url."""
raise NotImplementedError()
class ReadabilityArticleExtractor(ArticleExtractor):
_READABILITY_URL = 'http://readability:5000/extract'
def __init__(self, client_session: ClientSession):
self._client_session = client_session
async def extract_article(self, content: str, url: str) -> dict:
async with self._client_session.post(
ReadabilityArticleExtractor._READABILITY_URL,
data={'url': url, 'content': content}) as response:
assert response.status == 200, \
'TextExtractor service returned status [%d].' % response.status
json = await response.json()
if not json:
return {}
return {'authors': json.get('byline'),
'summary': json.get('excerpt'),
'length': json.get('length'),
'content_html': json.get('content'),
'content_text': json.get('textContent'),
'title': json.get('title')}
class PageFetcher(object):
async def fetch_page(self, url: str) -> PageRaw:
"""Returns the full page of the given url."""
raise NotImplementedError()
class HttpPageFetcher(PageFetcher):
def __init__(self, client_session: ClientSession):
self._client_session = client_session
async def fetch_page(self, url: str) -> PageRaw:
async with self._client_session.get(url) as response:
assert response.status == 200, \
'Unexpected status [%d].' % response.status
content = await response.text()
return PageRaw(content=content)
class DocumentExtractorService(object):
_ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')
_ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')
def __init__(self, page_fetcher: PageFetcher,
article_extractor: ArticleExtractor):
self._page_fetcher = page_fetcher
self._article_extractor = article_extractor
@staticmethod
def _extract_page_info(article: dict, url: str) -> dict:
"""Extracts additional page information."""
if not article:
return {}
language = detect(article.get('content_text'))
if len(language) > 2 and len(language[2]) > 1:
language_code = language[2][0][1]
else:
language_code = None
return {'url': url, 'language': language_code}
async def extract(self, url: str) -> dict:
"""Returns article content and page information for the given url.
Returns:
(dict): A dictionary with the values
- article
- authors: The authors.
- summary: The article summary.
- length: The number of characters.
- title: The title.
- content_html: The article content in HTML with links.
- content_text: The article content in plain text.
- links: The external links.
- images: The images.
- page
- url: The page url.
- language: The language code (2 digits).
- insight (not yet implemented)
- entities: Entities name recognition.
"""
page_raw = await self._page_fetcher.fetch_page(url)
article = await self._extract_article_info(page_raw.content, url)
page = self._extract_page_info(article, url)
return {'article': article, 'page': page}
async def _extract_article_info(self, content: str, url: str) -> dict:
article = await self._article_extractor.extract_article(content, url)
if not article:
return {}
article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(
article.get('content_html'))))
article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(
article.get('content_html'))))
return article
|
[
"from aiohttp import ClientSession\nfrom pycld2 import detect\nfrom re import compile\n\nfrom .models import PageRaw\n\n\nclass ArticleExtractor(object):\n async def extract_article(self, content: str, url: str) -> dict:\n \"\"\"Returns the article section of the given url.\"\"\"\n\n raise NotImplementedError()\n\n\nclass ReadabilityArticleExtractor(ArticleExtractor):\n _READABILITY_URL = 'http://readability:5000/extract'\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def extract_article(self, content: str, url: str) -> dict:\n async with self._client_session.post(\n ReadabilityArticleExtractor._READABILITY_URL,\n data={'url': url, 'content': content}) as response:\n assert response.status == 200, \\\n 'TextExtractor service returned status [%d].' % response.status\n json = await response.json()\n if not json:\n return {}\n return {'authors': json.get('byline'),\n 'summary': json.get('excerpt'),\n 'length': json.get('length'),\n 'content_html': json.get('content'),\n 'content_text': json.get('textContent'),\n 'title': json.get('title')}\n\n\nclass PageFetcher(object):\n async def fetch_page(self, url: str) -> PageRaw:\n \"\"\"Returns the full page of the given url.\"\"\"\n\n raise NotImplementedError()\n\n\nclass HttpPageFetcher(PageFetcher):\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) -> PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, \\\n 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\\\"([^\\\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\\\"([^\\\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher,\n article_extractor: ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) -> dict:\n \"\"\"Extracts additional page information.\"\"\"\n\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) -> dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) -> dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"from aiohttp import ClientSession\nfrom pycld2 import detect\nfrom re import compile\nfrom .models import PageRaw\n\n\nclass ArticleExtractor(object):\n\n async def extract_article(self, content: str, url: str) ->dict:\n \"\"\"Returns the article section of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass ReadabilityArticleExtractor(ArticleExtractor):\n _READABILITY_URL = 'http://readability:5000/extract'\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def extract_article(self, content: str, url: str) ->dict:\n async with self._client_session.post(ReadabilityArticleExtractor.\n _READABILITY_URL, data={'url': url, 'content': content}\n ) as response:\n assert response.status == 200, 'TextExtractor service returned status [%d].' % response.status\n json = await response.json()\n if not json:\n return {}\n return {'authors': json.get('byline'), 'summary': json.get(\n 'excerpt'), 'length': json.get('length'), 'content_html': json.\n get('content'), 'content_text': json.get('textContent'),\n 'title': json.get('title')}\n\n\nclass PageFetcher(object):\n\n async def fetch_page(self, url: str) ->PageRaw:\n \"\"\"Returns the full page of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass HttpPageFetcher(PageFetcher):\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n\n\nclass ArticleExtractor(object):\n\n async def extract_article(self, content: str, url: str) ->dict:\n \"\"\"Returns the article section of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass ReadabilityArticleExtractor(ArticleExtractor):\n _READABILITY_URL = 'http://readability:5000/extract'\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def extract_article(self, content: str, url: str) ->dict:\n async with self._client_session.post(ReadabilityArticleExtractor.\n _READABILITY_URL, data={'url': url, 'content': content}\n ) as response:\n assert response.status == 200, 'TextExtractor service returned status [%d].' % response.status\n json = await response.json()\n if not json:\n return {}\n return {'authors': json.get('byline'), 'summary': json.get(\n 'excerpt'), 'length': json.get('length'), 'content_html': json.\n get('content'), 'content_text': json.get('textContent'),\n 'title': json.get('title')}\n\n\nclass PageFetcher(object):\n\n async def fetch_page(self, url: str) ->PageRaw:\n \"\"\"Returns the full page of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass HttpPageFetcher(PageFetcher):\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n\n\nclass ReadabilityArticleExtractor(ArticleExtractor):\n _READABILITY_URL = 'http://readability:5000/extract'\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def extract_article(self, content: str, url: str) ->dict:\n async with self._client_session.post(ReadabilityArticleExtractor.\n _READABILITY_URL, data={'url': url, 'content': content}\n ) as response:\n assert response.status == 200, 'TextExtractor service returned status [%d].' % response.status\n json = await response.json()\n if not json:\n return {}\n return {'authors': json.get('byline'), 'summary': json.get(\n 'excerpt'), 'length': json.get('length'), 'content_html': json.\n get('content'), 'content_text': json.get('textContent'),\n 'title': json.get('title')}\n\n\nclass PageFetcher(object):\n\n async def fetch_page(self, url: str) ->PageRaw:\n \"\"\"Returns the full page of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass HttpPageFetcher(PageFetcher):\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n\n\nclass ReadabilityArticleExtractor(ArticleExtractor):\n <assignment token>\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def extract_article(self, content: str, url: str) ->dict:\n async with self._client_session.post(ReadabilityArticleExtractor.\n _READABILITY_URL, data={'url': url, 'content': content}\n ) as response:\n assert response.status == 200, 'TextExtractor service returned status [%d].' % response.status\n json = await response.json()\n if not json:\n return {}\n return {'authors': json.get('byline'), 'summary': json.get(\n 'excerpt'), 'length': json.get('length'), 'content_html': json.\n get('content'), 'content_text': json.get('textContent'),\n 'title': json.get('title')}\n\n\nclass PageFetcher(object):\n\n async def fetch_page(self, url: str) ->PageRaw:\n \"\"\"Returns the full page of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass HttpPageFetcher(PageFetcher):\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n\n\nclass ReadabilityArticleExtractor(ArticleExtractor):\n <assignment token>\n <function token>\n\n async def extract_article(self, content: str, url: str) ->dict:\n async with self._client_session.post(ReadabilityArticleExtractor.\n _READABILITY_URL, data={'url': url, 'content': content}\n ) as response:\n assert response.status == 200, 'TextExtractor service returned status [%d].' % response.status\n json = await response.json()\n if not json:\n return {}\n return {'authors': json.get('byline'), 'summary': json.get(\n 'excerpt'), 'length': json.get('length'), 'content_html': json.\n get('content'), 'content_text': json.get('textContent'),\n 'title': json.get('title')}\n\n\nclass PageFetcher(object):\n\n async def fetch_page(self, url: str) ->PageRaw:\n \"\"\"Returns the full page of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass HttpPageFetcher(PageFetcher):\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n\n\nclass PageFetcher(object):\n\n async def fetch_page(self, url: str) ->PageRaw:\n \"\"\"Returns the full page of the given url.\"\"\"\n raise NotImplementedError()\n\n\nclass HttpPageFetcher(PageFetcher):\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass HttpPageFetcher(PageFetcher):\n\n def __init__(self, client_session: ClientSession):\n self._client_session = client_session\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass HttpPageFetcher(PageFetcher):\n <function token>\n\n async def fetch_page(self, url: str) ->PageRaw:\n async with self._client_session.get(url) as response:\n assert response.status == 200, 'Unexpected status [%d].' % response.status\n content = await response.text()\n return PageRaw(content=content)\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass DocumentExtractorService(object):\n _ARTICLE_LINK_REGEX = compile('href=\"([^\"]*)')\n _ARTICLE_IMAGE_REGEX = compile('src=\"([^\"]*)')\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass DocumentExtractorService(object):\n <assignment token>\n <assignment token>\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n\n @staticmethod\n def _extract_page_info(article: dict, url: str) ->dict:\n \"\"\"Extracts additional page information.\"\"\"\n if not article:\n return {}\n language = detect(article.get('content_text'))\n if len(language) > 2 and len(language[2]) > 1:\n language_code = language[2][0][1]\n else:\n language_code = None\n return {'url': url, 'language': language_code}\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass DocumentExtractorService(object):\n <assignment token>\n <assignment token>\n\n def __init__(self, page_fetcher: PageFetcher, article_extractor:\n ArticleExtractor):\n self._page_fetcher = page_fetcher\n self._article_extractor = article_extractor\n <function token>\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass DocumentExtractorService(object):\n <assignment token>\n <assignment token>\n <function token>\n <function token>\n\n async def extract(self, url: str) ->dict:\n \"\"\"Returns article content and page information for the given url.\n\n Returns:\n (dict): A dictionary with the values\n - article\n - authors: The authors.\n - summary: The article summary.\n - length: The number of characters.\n - title: The title.\n - content_html: The article content in HTML with links.\n - content_text: The article content in plain text.\n - links: The external links.\n - images: The images.\n - page\n - url: The page url.\n - language: The language code (2 digits).\n - insight (not yet implemented)\n - entities: Entities name recognition.\n \"\"\"\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}\n\n async def _extract_article_info(self, content: str, url: str) ->dict:\n article = await self._article_extractor.extract_article(content, url)\n if not article:\n return {}\n article['links'] = list(set(self._ARTICLE_LINK_REGEX.findall(\n article.get('content_html'))))\n article['images'] = list(set(self._ARTICLE_IMAGE_REGEX.findall(\n article.get('content_html'))))\n return article\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
99,006 |
6bb01c6e1cefa3cbb1eefc5995d7637a82dd9003
|
#!/usr/bin/python3
import sys
import logging
import os
logging.basicConfig(stream=sys.stderr)
PROJECT_DIR = '/var/www/SSApp/{{app_version}}/'
#activate_this = os.path.join(PROJECT_DIR,'venv/bin', 'activate_this.py')
#with open(activate_this) as f:
#code = compile(f.read(), activate_this, 'exec')
#exec(code, dict(__file__=activate_this))
#exec(open(activate_this).read()
sys.path.insert(0,PROJECT_DIR)
# Shimmy to allow wsgi/apache to pass environment variables in to apache
# Assumes all of our environment variables begin SS_
# http://ericplumb.com/blog/passing-apache-environment-variables-to-django-via-mod_wsgi.html
def application(environ, start_response):
# pass the WSGI environment variables on through to os.environ
for key in environ:
if key.startswith('SS_'):
os.environ[key] = environ[key]
# Required by boto for CloudWatch
os.environ['AWS_DEFAULT_REGION'] = environ['AWS_DEFAULT_REGION']
from manage import app as _application
_application.secret_key = { removed }
return _application(environ, start_response)
|
[
"#!/usr/bin/python3\r\nimport sys\r\nimport logging\r\nimport os\r\n\r\nlogging.basicConfig(stream=sys.stderr)\r\n\r\nPROJECT_DIR = '/var/www/SSApp/{{app_version}}/'\r\n\r\n#activate_this = os.path.join(PROJECT_DIR,'venv/bin', 'activate_this.py')\r\n\r\n#with open(activate_this) as f:\r\n #code = compile(f.read(), activate_this, 'exec')\r\n #exec(code, dict(__file__=activate_this))\r\n\r\n#exec(open(activate_this).read()\r\n\r\n\r\nsys.path.insert(0,PROJECT_DIR)\r\n\r\n\r\n# Shimmy to allow wsgi/apache to pass environment variables in to apache\r\n# Assumes all of our environment variables begin SS_\r\n# http://ericplumb.com/blog/passing-apache-environment-variables-to-django-via-mod_wsgi.html\r\ndef application(environ, start_response):\r\n # pass the WSGI environment variables on through to os.environ\r\n for key in environ:\r\n if key.startswith('SS_'):\r\n os.environ[key] = environ[key]\r\n\r\n # Required by boto for CloudWatch\r\n os.environ['AWS_DEFAULT_REGION'] = environ['AWS_DEFAULT_REGION']\r\n\r\n from manage import app as _application\r\n _application.secret_key = { removed }\r\n\r\n return _application(environ, start_response)\r\n\r\n",
"import sys\nimport logging\nimport os\nlogging.basicConfig(stream=sys.stderr)\nPROJECT_DIR = '/var/www/SSApp/{{app_version}}/'\nsys.path.insert(0, PROJECT_DIR)\n\n\ndef application(environ, start_response):\n for key in environ:\n if key.startswith('SS_'):\n os.environ[key] = environ[key]\n os.environ['AWS_DEFAULT_REGION'] = environ['AWS_DEFAULT_REGION']\n from manage import app as _application\n _application.secret_key = {removed}\n return _application(environ, start_response)\n",
"<import token>\nlogging.basicConfig(stream=sys.stderr)\nPROJECT_DIR = '/var/www/SSApp/{{app_version}}/'\nsys.path.insert(0, PROJECT_DIR)\n\n\ndef application(environ, start_response):\n for key in environ:\n if key.startswith('SS_'):\n os.environ[key] = environ[key]\n os.environ['AWS_DEFAULT_REGION'] = environ['AWS_DEFAULT_REGION']\n from manage import app as _application\n _application.secret_key = {removed}\n return _application(environ, start_response)\n",
"<import token>\nlogging.basicConfig(stream=sys.stderr)\n<assignment token>\nsys.path.insert(0, PROJECT_DIR)\n\n\ndef application(environ, start_response):\n for key in environ:\n if key.startswith('SS_'):\n os.environ[key] = environ[key]\n os.environ['AWS_DEFAULT_REGION'] = environ['AWS_DEFAULT_REGION']\n from manage import app as _application\n _application.secret_key = {removed}\n return _application(environ, start_response)\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n\n\ndef application(environ, start_response):\n for key in environ:\n if key.startswith('SS_'):\n os.environ[key] = environ[key]\n os.environ['AWS_DEFAULT_REGION'] = environ['AWS_DEFAULT_REGION']\n from manage import app as _application\n _application.secret_key = {removed}\n return _application(environ, start_response)\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n<function token>\n"
] | false |
99,007 |
8a6a6186275dacfef32d41260adc6298f778e205
|
#Marcela Uliano-Silva, Wellcome Sanger Institute
import argparse
import pandas as pd
from Bio import SeqIO
parser= argparse.ArgumentParser(add_help=False)
parser.add_argument("-h", "--help", action="help", default=argparse.SUPPRESS, help= "Concatenate the fasta sequences into a file giving a text file with the path to each fasta per line")
parser.add_argument("-i", help= "-i: list of fasta paths, one per line", required = "True")
parser.add_argument("-o", help= "-o: concatenated fasta sequence", required = "True")
args = parser.parse_args()
oi=('path', 'oi')
paths = pd.read_csv(args.i, names=oi)
paths_list = paths["path"].values.tolist()
with open(args.o, 'w') as outfile:
for f in paths_list:
with open(f) as infile:
outfile.write(infile.read())
|
[
"\n\n#Marcela Uliano-Silva, Wellcome Sanger Institute\n\n\nimport argparse\nimport pandas as pd\nfrom Bio import SeqIO\n\nparser= argparse.ArgumentParser(add_help=False)\nparser.add_argument(\"-h\", \"--help\", action=\"help\", default=argparse.SUPPRESS, help= \"Concatenate the fasta sequences into a file giving a text file with the path to each fasta per line\") \nparser.add_argument(\"-i\", help= \"-i: list of fasta paths, one per line\", required = \"True\")\nparser.add_argument(\"-o\", help= \"-o: concatenated fasta sequence\", required = \"True\")\n\nargs = parser.parse_args()\n\noi=('path', 'oi')\npaths = pd.read_csv(args.i, names=oi)\n\npaths_list = paths[\"path\"].values.tolist()\n\nwith open(args.o, 'w') as outfile:\n for f in paths_list:\n with open(f) as infile:\n outfile.write(infile.read())\n",
"import argparse\nimport pandas as pd\nfrom Bio import SeqIO\nparser = argparse.ArgumentParser(add_help=False)\nparser.add_argument('-h', '--help', action='help', default=argparse.\n SUPPRESS, help=\n 'Concatenate the fasta sequences into a file giving a text file with the path to each fasta per line'\n )\nparser.add_argument('-i', help='-i: list of fasta paths, one per line',\n required='True')\nparser.add_argument('-o', help='-o: concatenated fasta sequence', required=\n 'True')\nargs = parser.parse_args()\noi = 'path', 'oi'\npaths = pd.read_csv(args.i, names=oi)\npaths_list = paths['path'].values.tolist()\nwith open(args.o, 'w') as outfile:\n for f in paths_list:\n with open(f) as infile:\n outfile.write(infile.read())\n",
"<import token>\nparser = argparse.ArgumentParser(add_help=False)\nparser.add_argument('-h', '--help', action='help', default=argparse.\n SUPPRESS, help=\n 'Concatenate the fasta sequences into a file giving a text file with the path to each fasta per line'\n )\nparser.add_argument('-i', help='-i: list of fasta paths, one per line',\n required='True')\nparser.add_argument('-o', help='-o: concatenated fasta sequence', required=\n 'True')\nargs = parser.parse_args()\noi = 'path', 'oi'\npaths = pd.read_csv(args.i, names=oi)\npaths_list = paths['path'].values.tolist()\nwith open(args.o, 'w') as outfile:\n for f in paths_list:\n with open(f) as infile:\n outfile.write(infile.read())\n",
"<import token>\n<assignment token>\nparser.add_argument('-h', '--help', action='help', default=argparse.\n SUPPRESS, help=\n 'Concatenate the fasta sequences into a file giving a text file with the path to each fasta per line'\n )\nparser.add_argument('-i', help='-i: list of fasta paths, one per line',\n required='True')\nparser.add_argument('-o', help='-o: concatenated fasta sequence', required=\n 'True')\n<assignment token>\nwith open(args.o, 'w') as outfile:\n for f in paths_list:\n with open(f) as infile:\n outfile.write(infile.read())\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,008 |
14e16c46cb2dff9d56aceeaf5e4708591565fd5e
|
from django.contrib import admin
from models import Normal
from nani.admin import TranslateableAdmin
from testproject.app.models import Standard
admin.site.register(Normal, TranslateableAdmin)
|
[
"from django.contrib import admin\nfrom models import Normal\nfrom nani.admin import TranslateableAdmin\nfrom testproject.app.models import Standard\n\n\nadmin.site.register(Normal, TranslateableAdmin)",
"from django.contrib import admin\nfrom models import Normal\nfrom nani.admin import TranslateableAdmin\nfrom testproject.app.models import Standard\nadmin.site.register(Normal, TranslateableAdmin)\n",
"<import token>\nadmin.site.register(Normal, TranslateableAdmin)\n",
"<import token>\n<code token>\n"
] | false |
99,009 |
7067eb9840f0e2fc9128f3c52084cdea6c7de011
|
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from devices import views
app_name = 'devices'
router = DefaultRouter()
router.register('ports-vlan', views.PortVlanMemberModelViewSet)
router.register('ports', views.PortModelViewSet)
router.register('pon', views.DevicePONViewSet)
router.register('', views.DeviceModelViewSet)
urlpatterns = [
path('groups/', views.DeviceGroupsList.as_view()),
path('without_groups/', views.DeviceWithoutGroupListAPIView.as_view()),
path('', include(router.urls)),
]
|
[
"from django.urls import path, include\nfrom rest_framework.routers import DefaultRouter\nfrom devices import views\n\n\napp_name = 'devices'\n\n\nrouter = DefaultRouter()\nrouter.register('ports-vlan', views.PortVlanMemberModelViewSet)\nrouter.register('ports', views.PortModelViewSet)\nrouter.register('pon', views.DevicePONViewSet)\nrouter.register('', views.DeviceModelViewSet)\n\nurlpatterns = [\n path('groups/', views.DeviceGroupsList.as_view()),\n path('without_groups/', views.DeviceWithoutGroupListAPIView.as_view()),\n path('', include(router.urls)),\n]\n",
"from django.urls import path, include\nfrom rest_framework.routers import DefaultRouter\nfrom devices import views\napp_name = 'devices'\nrouter = DefaultRouter()\nrouter.register('ports-vlan', views.PortVlanMemberModelViewSet)\nrouter.register('ports', views.PortModelViewSet)\nrouter.register('pon', views.DevicePONViewSet)\nrouter.register('', views.DeviceModelViewSet)\nurlpatterns = [path('groups/', views.DeviceGroupsList.as_view()), path(\n 'without_groups/', views.DeviceWithoutGroupListAPIView.as_view()), path\n ('', include(router.urls))]\n",
"<import token>\napp_name = 'devices'\nrouter = DefaultRouter()\nrouter.register('ports-vlan', views.PortVlanMemberModelViewSet)\nrouter.register('ports', views.PortModelViewSet)\nrouter.register('pon', views.DevicePONViewSet)\nrouter.register('', views.DeviceModelViewSet)\nurlpatterns = [path('groups/', views.DeviceGroupsList.as_view()), path(\n 'without_groups/', views.DeviceWithoutGroupListAPIView.as_view()), path\n ('', include(router.urls))]\n",
"<import token>\n<assignment token>\nrouter.register('ports-vlan', views.PortVlanMemberModelViewSet)\nrouter.register('ports', views.PortModelViewSet)\nrouter.register('pon', views.DevicePONViewSet)\nrouter.register('', views.DeviceModelViewSet)\n<assignment token>\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n"
] | false |
99,010 |
574421b4f6d4931627d57bfd8046cbec02cc2cb9
|
#
# https://leetcode.com/problems/ransom-note/
#
# We make use of builtin data structure callec Counter from the collections package
# We could have also used normal dictionary or default dict
#
# Time Complexity: O(n)
from collections import Counter
class Solution:
def canConstruct(self, ransomNote: str, magazine: str) -> bool:
if not (Counter(ransomNote) - Counter(magazine)):
return True
return False
if __name__ == '__main__':
magazine = 'ab'
ransom = 'a'
ans = Solution().canConstruct(ransom, magazine)
print(ans)
|
[
"#\n# https://leetcode.com/problems/ransom-note/\n#\n# We make use of builtin data structure callec Counter from the collections package\n# We could have also used normal dictionary or default dict\n#\n# Time Complexity: O(n)\n\nfrom collections import Counter\n\n\nclass Solution:\n def canConstruct(self, ransomNote: str, magazine: str) -> bool:\n if not (Counter(ransomNote) - Counter(magazine)):\n return True\n return False\n\n\nif __name__ == '__main__':\n magazine = 'ab'\n ransom = 'a'\n ans = Solution().canConstruct(ransom, magazine)\n print(ans)\n",
"from collections import Counter\n\n\nclass Solution:\n\n def canConstruct(self, ransomNote: str, magazine: str) ->bool:\n if not Counter(ransomNote) - Counter(magazine):\n return True\n return False\n\n\nif __name__ == '__main__':\n magazine = 'ab'\n ransom = 'a'\n ans = Solution().canConstruct(ransom, magazine)\n print(ans)\n",
"<import token>\n\n\nclass Solution:\n\n def canConstruct(self, ransomNote: str, magazine: str) ->bool:\n if not Counter(ransomNote) - Counter(magazine):\n return True\n return False\n\n\nif __name__ == '__main__':\n magazine = 'ab'\n ransom = 'a'\n ans = Solution().canConstruct(ransom, magazine)\n print(ans)\n",
"<import token>\n\n\nclass Solution:\n\n def canConstruct(self, ransomNote: str, magazine: str) ->bool:\n if not Counter(ransomNote) - Counter(magazine):\n return True\n return False\n\n\n<code token>\n",
"<import token>\n\n\nclass Solution:\n <function token>\n\n\n<code token>\n",
"<import token>\n<class token>\n<code token>\n"
] | false |
99,011 |
39e919f2fc239ba153052fa4d9a12e18bd132e5e
|
from __future__ import division
import os
import warnings
import pandas as pd
import numpy as np
import random
import nibabel
import torch
import torchvision.transforms as transforms
#import torch.utils.transforms as extended_transforms
from torch.utils.data import Dataset, DataLoader
from . import data
from .utils import export
from skimage import io
from PIL import Image
from sklearn.metrics import roc_auc_score
from skimage.transform import resize
######################################################
######################################################
######################################################
@export
def cxr14():
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]
)
train_transformation = data.TransformTwice(transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
]))
eval_transformation = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize,
])
return {
'train_transformation': train_transformation,
'eval_transformation': eval_transformation,
# 'datadir': '../data/cxr14/',
# 'csvdir': '../data_csv/',
# 'num_classes': None
}
class MaskToTensor(object):
def __call__(self, img):
return torch.from_numpy(np.array(img, dtype=np.int32)).long()
def RotateFlip(angle, flip):
channel_stats = dict(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_transformation = transforms.Compose([
transforms.RandomRotation(degrees=(angle,angle)),
transforms.RandomHorizontalFlip(p=flip),
transforms.Resize(256),
transforms.ToTensor(),
transforms.Normalize(**channel_stats)
])
target_transformation = transforms.Compose([
transforms.RandomRotation(degrees=(angle,angle)),
transforms.RandomHorizontalFlip(p=flip),
transforms.Resize(256),
transforms.ToTensor()
])
return train_transformation, target_transformation
def RotateFlipFlip(angle, hflip, vflip):
channel_stats = dict(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_transformation = transforms.Compose([
# transforms.ToPILImage(),
transforms.RandomRotation(degrees=(angle,angle)),
transforms.RandomHorizontalFlip(p=hflip),
transforms.RandomVerticalFlip(p=vflip),
transforms.Resize(256),
transforms.ToTensor(),
# transforms.Normalize(**channel_stats)
])
target_transformation = transforms.Compose([
# transforms.ToPILImage(),
transforms.RandomRotation(degrees=(angle,angle)),
transforms.RandomHorizontalFlip(p=hflip),
transforms.RandomVerticalFlip(p=vflip),
transforms.Resize(256),
transforms.ToTensor()
])
return train_transformation, target_transformation
@export
def ventricleNormal():
channel_stats = dict(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
chance = random.random()
angles = range(-5,6) #rotate angles -5 to 5
num_transforms = len(angles)
#for i in range(num_transforms * 2):
# if i/(num_transforms * 2) <= chance < (1 + i)/(num_transforms * 2):
# train_transformation, target_transformation = RotateFlip( angles[i % num_transforms], i // num_transforms)
for i in range(num_transforms * 4):
if i/(num_transforms * 4) <= chance < (1 + i)/(num_transforms * 4):
train_transformation, target_transformation = RotateFlipFlip( angles[i % num_transforms], i // num_transforms, (i // num_transforms) % 2)
eval_transformation = transforms.Compose([
transforms.Resize(256),
transforms.ToTensor(),
# transforms.Normalize(**channel_stats)
])
eval_target_transformation = transforms.Compose([
transforms.Resize(256),
transforms.ToTensor(),
])
return {
'train_transformation': train_transformation,
'target_transformation': target_transformation,
'eval_transformation': eval_transformation,
'eval_target_transformation': eval_target_transformation
}
@export
def imagenet():
channel_stats = dict(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_transformation = data.TransformTwice(transforms.Compose([
transforms.RandomRotation(10),
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1),
transforms.ToTensor(),
transforms.Normalize(**channel_stats)
]))
eval_transformation = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize(**channel_stats)
])
return {
'train_transformation': train_transformation,
'eval_transformation': eval_transformation
#'datadir': 'data-local/images/ilsvrc2012/',
#'num_classes': 1000
}
@export
def cifar10():
channel_stats = dict(mean=[0.4914, 0.4822, 0.4465],
std=[0.2470, 0.2435, 0.2616])
train_transformation = data.TransformTwice(transforms.Compose([
data.RandomTranslateWithReflect(4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(**channel_stats)
]))
eval_transformation = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(**channel_stats)
])
return {
'train_transformation': train_transformation,
'eval_transformation': eval_transformation,
'datadir': 'data-local/images/cifar/cifar10/by-image',
'num_classes': 10
}
### complete version similar to torchvision.datasets.ImageFolder / torchvision.datasets.DatasetFolder
class ChestXRayDataset(Dataset):
""" CXR8 dataset."""
def __init__(self, csv_file, root_dir, transform=None):
"""
Args:
csv_file (string): Path to the csv file with annotations.
root_dir (string): Directory with all the images.
transform (callable, optional): Optional transform to be applied
on a sample.
"""
self.root_dir = root_dir
self.transform = transform
df = pd.read_csv(csv_file)
classes = df.columns[3:].values.tolist()
self.class_to_idx = {classes[i]: i for i in range(len(classes))}
self.idx_to_class = dict(enumerate(classes))
self.classes = classes
samples = []
for idx in range(len(df)):
path = df.iloc[idx]['image_path']
target = df.iloc[idx, 3:].as_matrix().astype('float32') ### labels type: array
item = (path, target)
samples.append(item)
assert(len(samples) == len(df))
self.samples = samples
def __len__(self):
return len(self.samples)
def __getitem__(self, index):
path, target = self.samples[index]
### load image
img_name = os.path.join(self.root_dir, path) ### get 'image_path'
image = io.imread(img_name)
if(len(image.shape) == 3): ### some samples have four channels
image = image[:,:,0]
h, w = image.shape
c = 3
images = np.zeros((h, w, c), dtype = np.uint8) ### Set image channel dim = 3
for i in range(c):
images[:,:,i] = image
assert(images.shape == (1024,1024,3))
images = Image.fromarray(images)
if self.transform:
images = self.transform(images)
### load labels
labels = torch.from_numpy(target)
### return tuple
return (images, labels)
class IVCdataset(Dataset):
def __init__(self, csv_file, path, transform=None):
"""
csv_file = csv where first column = image filenames and second column = classification
path = directory to all iamges
"""
self.path = path
self.transform = transform
df = pd.read_csv(csv_file, header=None)
classes = df.iloc[:,1].values.tolist()
self.class_to_idx = {classes[i]: i for i in range(len(classes))}
self.idx_to_class = dict(enumerate(classes))
self.classes = classes
print("> dataset size: ", df.shape[0])
#load labels
samples = []
for i in range(len(df)):
name = df.iloc[i,0]
target = df.iloc[i,1].astype('int_')
item = (name, target)
samples.append(item)
assert(len(samples) == len(df))
self.samples = samples
def __len__(self):
return len(self.samples)
def __getitem__(self, index):
path, target = self.samples[index]
img_name = os.path.join(self.path, path)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
image = io.imread(img_name)
if (len(image.shape)==3):
image = image[:,:,0]
#with warnings.catch_warnings():
# warnings.simplefilter("ignore")
# image = resize(image, (224,224))
image = image.astype('float32')
h, w = image.shape
c = 3
images = np.zeros((h, w, c), dtype = np.uint8)
for i in range(c):
images[:,:,i] = image
#assert(images.shape == (1024,1024,3))
images = Image.fromarray(images)
#trans = transforms.ToTensor()
#images = trans(images)
if self.transform:
images = self.transform(images)
labels = torch.from_numpy(np.array([target]))
return (images, labels)
def loadImages(image, basedir):
#img_name = os.path.join(basedir, image)
#img_name = nibabel.load(img_name).get_data()
#with warnings.catch_warnings():
# warnings.simplefilter("ignore")
# image = io.imread(img_name)
if (len(image.shape)==3):
image = image[:,:,0]
image = image.astype('float32')
h, w = image.shape
c = 3
images = np.zeros((h, w, c), dtype = np.uint8)
for i in range(c):
images[:,:,i] = image
images = Image.fromarray(images)
return images
class Ventricles(Dataset):
def __init__(self, csv_file, path_raw, path_segs, input_transform=None, target_transform=None, train=False):
self.path_raw = path_raw
self.path_segs = path_segs
self.input_transform = input_transform
self.target_transform = target_transform
self.train = train
df = pd.read_csv(csv_file, header=None)
#print("Dataset size: ", df.shape[0])
samples = []
#lower = round( len(df) / 5 )
#upper = round( len(df) / 5 * 4 )
for i in range(len(df)):
name = df.iloc[i,0]
target = df.iloc[i,1]
image_name = os.path.join(path_raw, name)
target_name = os.path.join(path_segs, target)
image_ni = nibabel.load(image_name).get_data()
target_ni = nibabel.load(target_name).get_data()
slices = image_ni.shape[2]
lower = slices / 4
upper = slices / 4 * 3
for i in range(slices):
name = image_ni[:,:,i]
target = target_ni[:,:,i]
item = (name, target)
samples.append(item)
if train and lower < i < upper:
for _ in range(3): samples.append(item)
self.samples = samples
def __len__(self):
return len(self.samples)
def __getitem__(self, index):
#images, targets = self.samples[index]
image, target = self.samples[index]
images = loadImages(image, self.path_raw)
targets = loadImages(target, self.path_segs)
#images = image
#targets = target
tobinary = targets.convert('L')
targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')
if self.train:
channel_stats = dict(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
chance = random.random()
angle = range(-5,6) #rotate angles -5 to 5
n_angles = len(angle)
#for i in range(num_transforms * 2):
# if i/(num_transforms * 2) <= chance < (1 + i)/(num_transforms * 2):
# train_transformation, target_transformation = RotateFlip( angles[i % num_transforms], i // num_transforms)
# print('angles/flip', angles[i % num_transforms], i // num_transforms)
for i in range(n_angles * 4):
if i/(n_angles * 4) <= chance < (1 + i)/(n_angles * 4):
input_transform, target_transform = RotateFlipFlip( angle[i % n_angles], i // n_angles, (i // n_angles) % 2)
images = input_transform(images)
targets_mask = target_transform(targets_mask)
#targets_mask = target_transform(targets)
else:
if self.input_transform:
images = self.input_transform(images)
if self.target_transform:
targets_mask = self.target_transform(targets_mask)
return (images, targets_mask)
|
[
"from __future__ import division\n\nimport os\nimport warnings\nimport pandas as pd\nimport numpy as np\nimport random\nimport nibabel\n\nimport torch\nimport torchvision.transforms as transforms\n#import torch.utils.transforms as extended_transforms\nfrom torch.utils.data import Dataset, DataLoader\n\nfrom . import data\nfrom .utils import export\n\n\nfrom skimage import io\nfrom PIL import Image\nfrom sklearn.metrics import roc_auc_score\nfrom skimage.transform import resize\n\n######################################################\n######################################################\n######################################################\n\n@export\ndef cxr14():\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225]\n )\n\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n normalize,\n ]))\n\n eval_transformation = transforms.Compose([\n transforms.Resize(256),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n normalize,\n ])\n\n return {\n 'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation,\n # 'datadir': '../data/cxr14/',\n # 'csvdir': '../data_csv/',\n # 'num_classes': None\n }\n\n\nclass MaskToTensor(object):\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\ndef RotateFlip(angle, flip): \n channel_stats = dict(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([\n transforms.RandomRotation(degrees=(angle,angle)),\n transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256),\n transforms.ToTensor(),\n transforms.Normalize(**channel_stats)\n ])\n target_transformation = transforms.Compose([\n transforms.RandomRotation(degrees=(angle,angle)),\n transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256),\n transforms.ToTensor()\n ])\n\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip): \n channel_stats = dict(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n \n train_transformation = transforms.Compose([\n # transforms.ToPILImage(),\n transforms.RandomRotation(degrees=(angle,angle)),\n transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip),\n transforms.Resize(256),\n transforms.ToTensor(),\n# transforms.Normalize(**channel_stats)\n ])\n target_transformation = transforms.Compose([\n # transforms.ToPILImage(),\n transforms.RandomRotation(degrees=(angle,angle)),\n transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip),\n transforms.Resize(256),\n transforms.ToTensor()\n ])\n\n return train_transformation, target_transformation\n\n\n\n@export\ndef ventricleNormal():\n channel_stats = dict(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n\n chance = random.random()\n angles = range(-5,6) #rotate angles -5 to 5\n num_transforms = len(angles)\n\n #for i in range(num_transforms * 2):\n # if i/(num_transforms * 2) <= chance < (1 + i)/(num_transforms * 2):\n # train_transformation, target_transformation = RotateFlip( angles[i % num_transforms], i // num_transforms)\n for i in range(num_transforms * 4):\n if i/(num_transforms * 4) <= chance < (1 + i)/(num_transforms * 4):\n train_transformation, target_transformation = RotateFlipFlip( angles[i % num_transforms], i // num_transforms, (i // num_transforms) % 2)\n\n eval_transformation = transforms.Compose([\n transforms.Resize(256),\n transforms.ToTensor(),\n # transforms.Normalize(**channel_stats)\n ])\n\n eval_target_transformation = transforms.Compose([\n transforms.Resize(256),\n transforms.ToTensor(),\n ])\n\n return {\n 'train_transformation': train_transformation,\n 'target_transformation': target_transformation,\n 'eval_transformation': eval_transformation,\n 'eval_target_transformation': eval_target_transformation\n }\n\n@export\ndef imagenet():\n channel_stats = dict(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomRotation(10),\n transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(),\n transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1),\n transforms.ToTensor(),\n transforms.Normalize(**channel_stats)\n ]))\n eval_transformation = transforms.Compose([\n transforms.Resize(256),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n transforms.Normalize(**channel_stats)\n ])\n\n return {\n 'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation\n #'datadir': 'data-local/images/ilsvrc2012/',\n #'num_classes': 1000\n }\n\n\n@export\ndef cifar10():\n channel_stats = dict(mean=[0.4914, 0.4822, 0.4465],\n std=[0.2470, 0.2435, 0.2616])\n train_transformation = data.TransformTwice(transforms.Compose([\n data.RandomTranslateWithReflect(4),\n transforms.RandomHorizontalFlip(),\n transforms.ToTensor(),\n transforms.Normalize(**channel_stats)\n ]))\n eval_transformation = transforms.Compose([\n transforms.ToTensor(),\n transforms.Normalize(**channel_stats)\n ])\n\n return {\n 'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation,\n 'datadir': 'data-local/images/cifar/cifar10/by-image',\n 'num_classes': 10\n }\n\n\n### complete version similar to torchvision.datasets.ImageFolder / torchvision.datasets.DatasetFolder\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n \n df = pd.read_csv(csv_file)\n \n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n \n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32') ### labels type: array\n item = (path, target)\n samples.append(item)\n assert(len(samples) == len(df))\n self.samples = samples\n \n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n \n path, target = self.samples[index]\n \n ### load image\n img_name = os.path.join(self.root_dir, path) ### get 'image_path'\n image = io.imread(img_name)\n if(len(image.shape) == 3): ### some samples have four channels\n image = image[:,:,0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype = np.uint8) ### Set image channel dim = 3\n for i in range(c):\n images[:,:,i] = image \n assert(images.shape == (1024,1024,3))\n images = Image.fromarray(images)\n\n if self.transform:\n images = self.transform(images)\n\n ### load labels\n labels = torch.from_numpy(target)\n \n ### return tuple\n return (images, labels)\n\n\n\nclass IVCdataset(Dataset):\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n\n df = pd.read_csv(csv_file, header=None)\n\n classes = df.iloc[:,1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print(\"> dataset size: \", df.shape[0])\n\n #load labels\n samples = []\n for i in range(len(df)):\n name = df.iloc[i,0]\n target = df.iloc[i,1].astype('int_')\n item = (name, target)\n samples.append(item)\n assert(len(samples) == len(df))\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n image = io.imread(img_name)\n\n if (len(image.shape)==3):\n image = image[:,:,0]\n\n #with warnings.catch_warnings():\n # warnings.simplefilter(\"ignore\")\n # image = resize(image, (224,224))\n image = image.astype('float32') \n \n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype = np.uint8)\n for i in range(c):\n images[:,:,i] = image\n #assert(images.shape == (1024,1024,3))\n\n images = Image.fromarray(images) \n\n #trans = transforms.ToTensor()\n #images = trans(images) \n \n if self.transform:\n images = self.transform(images)\n \n labels = torch.from_numpy(np.array([target]))\n return (images, labels)\n\n\ndef loadImages(image, basedir):\n #img_name = os.path.join(basedir, image)\n \n #img_name = nibabel.load(img_name).get_data()\n\n #with warnings.catch_warnings():\n # warnings.simplefilter(\"ignore\")\n # image = io.imread(img_name)\n if (len(image.shape)==3):\n image = image[:,:,0]\n image = image.astype('float32') \n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype = np.uint8)\n for i in range(c):\n images[:,:,i] = image\n images = Image.fromarray(images) \n return images\n\n\nclass Ventricles(Dataset):\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None, target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n\n df = pd.read_csv(csv_file, header=None)\n #print(\"Dataset size: \", df.shape[0])\n\n samples = []\n\n #lower = round( len(df) / 5 )\n #upper = round( len(df) / 5 * 4 )\n for i in range(len(df)):\n name = df.iloc[i,0]\n target = df.iloc[i,1]\n\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n\n\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:,:,i]\n target = target_ni[:,:,i]\n item = (name, target)\n samples.append(item)\n \n if train and lower < i < upper:\n for _ in range(3): samples.append(item)\n self.samples = samples\n\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n #images, targets = self.samples[index]\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n #images = image\n #targets = target\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n\n\n\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n\n chance = random.random()\n angle = range(-5,6) #rotate angles -5 to 5\n n_angles = len(angle)\n\n #for i in range(num_transforms * 2):\n # if i/(num_transforms * 2) <= chance < (1 + i)/(num_transforms * 2):\n # train_transformation, target_transformation = RotateFlip( angles[i % num_transforms], i // num_transforms)\n # print('angles/flip', angles[i % num_transforms], i // num_transforms) \n for i in range(n_angles * 4):\n if i/(n_angles * 4) <= chance < (1 + i)/(n_angles * 4):\n input_transform, target_transform = RotateFlipFlip( angle[i % n_angles], i // n_angles, (i // n_angles) % 2)\n\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n #targets_mask = target_transform(targets)\n\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n\n return (images, targets_mask)\n\n\n",
"from __future__ import division\nimport os\nimport warnings\nimport pandas as pd\nimport numpy as np\nimport random\nimport nibabel\nimport torch\nimport torchvision.transforms as transforms\nfrom torch.utils.data import Dataset, DataLoader\nfrom . import data\nfrom .utils import export\nfrom skimage import io\nfrom PIL import Image\nfrom sklearn.metrics import roc_auc_score\nfrom skimage.transform import resize\n\n\n@export\ndef cxr14():\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229,\n 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomResizedCrop(224), transforms.RandomHorizontalFlip(\n ), transforms.ToTensor(), normalize]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), normalize])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n@export\ndef ventricleNormal():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n chance = random.random()\n angles = range(-5, 6)\n num_transforms = len(angles)\n for i in range(num_transforms * 4):\n if i / (num_transforms * 4) <= chance < (1 + i) / (num_transforms * 4):\n train_transformation, target_transformation = RotateFlipFlip(angles\n [i % num_transforms], i // num_transforms, i //\n num_transforms % 2)\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.ToTensor()])\n eval_target_transformation = transforms.Compose([transforms.Resize(256),\n transforms.ToTensor()])\n return {'train_transformation': train_transformation,\n 'target_transformation': target_transformation,\n 'eval_transformation': eval_transformation,\n 'eval_target_transformation': eval_target_transformation}\n\n\n@export\ndef imagenet():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomRotation(10), transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(), transforms.ColorJitter(\n brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1), transforms.\n ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), transforms.\n Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\n@export\ndef cifar10():\n channel_stats = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.247, 0.2435,\n 0.2616])\n train_transformation = data.TransformTwice(transforms.Compose([data.\n RandomTranslateWithReflect(4), transforms.RandomHorizontalFlip(),\n transforms.ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.ToTensor(),\n transforms.Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation, 'datadir':\n 'data-local/images/cifar/cifar10/by-image', 'num_classes': 10}\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\ndef loadImages(image, basedir):\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n return images\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n\n\n@export\ndef cxr14():\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229,\n 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomResizedCrop(224), transforms.RandomHorizontalFlip(\n ), transforms.ToTensor(), normalize]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), normalize])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n@export\ndef ventricleNormal():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n chance = random.random()\n angles = range(-5, 6)\n num_transforms = len(angles)\n for i in range(num_transforms * 4):\n if i / (num_transforms * 4) <= chance < (1 + i) / (num_transforms * 4):\n train_transformation, target_transformation = RotateFlipFlip(angles\n [i % num_transforms], i // num_transforms, i //\n num_transforms % 2)\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.ToTensor()])\n eval_target_transformation = transforms.Compose([transforms.Resize(256),\n transforms.ToTensor()])\n return {'train_transformation': train_transformation,\n 'target_transformation': target_transformation,\n 'eval_transformation': eval_transformation,\n 'eval_target_transformation': eval_target_transformation}\n\n\n@export\ndef imagenet():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomRotation(10), transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(), transforms.ColorJitter(\n brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1), transforms.\n ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), transforms.\n Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\n@export\ndef cifar10():\n channel_stats = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.247, 0.2435,\n 0.2616])\n train_transformation = data.TransformTwice(transforms.Compose([data.\n RandomTranslateWithReflect(4), transforms.RandomHorizontalFlip(),\n transforms.ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.ToTensor(),\n transforms.Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation, 'datadir':\n 'data-local/images/cifar/cifar10/by-image', 'num_classes': 10}\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\ndef loadImages(image, basedir):\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n return images\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n@export\ndef ventricleNormal():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n chance = random.random()\n angles = range(-5, 6)\n num_transforms = len(angles)\n for i in range(num_transforms * 4):\n if i / (num_transforms * 4) <= chance < (1 + i) / (num_transforms * 4):\n train_transformation, target_transformation = RotateFlipFlip(angles\n [i % num_transforms], i // num_transforms, i //\n num_transforms % 2)\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.ToTensor()])\n eval_target_transformation = transforms.Compose([transforms.Resize(256),\n transforms.ToTensor()])\n return {'train_transformation': train_transformation,\n 'target_transformation': target_transformation,\n 'eval_transformation': eval_transformation,\n 'eval_target_transformation': eval_target_transformation}\n\n\n@export\ndef imagenet():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomRotation(10), transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(), transforms.ColorJitter(\n brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1), transforms.\n ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), transforms.\n Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\n@export\ndef cifar10():\n channel_stats = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.247, 0.2435,\n 0.2616])\n train_transformation = data.TransformTwice(transforms.Compose([data.\n RandomTranslateWithReflect(4), transforms.RandomHorizontalFlip(),\n transforms.ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.ToTensor(),\n transforms.Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation, 'datadir':\n 'data-local/images/cifar/cifar10/by-image', 'num_classes': 10}\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\ndef loadImages(image, basedir):\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n return images\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n<function token>\n\n\n@export\ndef imagenet():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomRotation(10), transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(), transforms.ColorJitter(\n brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1), transforms.\n ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), transforms.\n Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\n@export\ndef cifar10():\n channel_stats = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.247, 0.2435,\n 0.2616])\n train_transformation = data.TransformTwice(transforms.Compose([data.\n RandomTranslateWithReflect(4), transforms.RandomHorizontalFlip(),\n transforms.ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.ToTensor(),\n transforms.Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation, 'datadir':\n 'data-local/images/cifar/cifar10/by-image', 'num_classes': 10}\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\ndef loadImages(image, basedir):\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n return images\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n<function token>\n\n\n@export\ndef imagenet():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomRotation(10), transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(), transforms.ColorJitter(\n brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1), transforms.\n ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), transforms.\n Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\n@export\ndef cifar10():\n channel_stats = dict(mean=[0.4914, 0.4822, 0.4465], std=[0.247, 0.2435,\n 0.2616])\n train_transformation = data.TransformTwice(transforms.Compose([data.\n RandomTranslateWithReflect(4), transforms.RandomHorizontalFlip(),\n transforms.ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.ToTensor(),\n transforms.Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation, 'datadir':\n 'data-local/images/cifar/cifar10/by-image', 'num_classes': 10}\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n<function token>\n\n\n@export\ndef imagenet():\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = data.TransformTwice(transforms.Compose([\n transforms.RandomRotation(10), transforms.RandomResizedCrop(224),\n transforms.RandomHorizontalFlip(), transforms.ColorJitter(\n brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1), transforms.\n ToTensor(), transforms.Normalize(**channel_stats)]))\n eval_transformation = transforms.Compose([transforms.Resize(256),\n transforms.CenterCrop(224), transforms.ToTensor(), transforms.\n Normalize(**channel_stats)])\n return {'train_transformation': train_transformation,\n 'eval_transformation': eval_transformation}\n\n\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\ndef RotateFlipFlip(angle, hflip, vflip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=hflip),\n transforms.RandomVerticalFlip(p=vflip), transforms.Resize(256),\n transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\ndef RotateFlip(angle, flip):\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n train_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor(), transforms.Normalize\n (**channel_stats)])\n target_transformation = transforms.Compose([transforms.RandomRotation(\n degrees=(angle, angle)), transforms.RandomHorizontalFlip(p=flip),\n transforms.Resize(256), transforms.ToTensor()])\n return train_transformation, target_transformation\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n\n def __call__(self, img):\n return torch.from_numpy(np.array(img, dtype=np.int32)).long()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n\n\nclass MaskToTensor(object):\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n \"\"\" CXR8 dataset.\"\"\"\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n <docstring token>\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n <docstring token>\n\n def __init__(self, csv_file, root_dir, transform=None):\n \"\"\"\n Args:\n csv_file (string): Path to the csv file with annotations.\n root_dir (string): Directory with all the images.\n transform (callable, optional): Optional transform to be applied\n on a sample.\n \"\"\"\n self.root_dir = root_dir\n self.transform = transform\n df = pd.read_csv(csv_file)\n classes = df.columns[3:].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n samples = []\n for idx in range(len(df)):\n path = df.iloc[idx]['image_path']\n target = df.iloc[idx, 3:].as_matrix().astype('float32')\n item = path, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n <function token>\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n <docstring token>\n <function token>\n <function token>\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.root_dir, path)\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n assert images.shape == (1024, 1024, 3)\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(target)\n return images, labels\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\nclass ChestXRayDataset(Dataset):\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n\n\nclass IVCdataset(Dataset):\n\n def __init__(self, csv_file, path, transform=None):\n \"\"\"\n csv_file = csv where first column = image filenames and second column = classification\n path = directory to all iamges\n \"\"\"\n self.path = path\n self.transform = transform\n df = pd.read_csv(csv_file, header=None)\n classes = df.iloc[:, 1].values.tolist()\n self.class_to_idx = {classes[i]: i for i in range(len(classes))}\n self.idx_to_class = dict(enumerate(classes))\n self.classes = classes\n print('> dataset size: ', df.shape[0])\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1].astype('int_')\n item = name, target\n samples.append(item)\n assert len(samples) == len(df)\n self.samples = samples\n <function token>\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n\n\nclass IVCdataset(Dataset):\n <function token>\n <function token>\n\n def __getitem__(self, index):\n path, target = self.samples[index]\n img_name = os.path.join(self.path, path)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n image = io.imread(img_name)\n if len(image.shape) == 3:\n image = image[:, :, 0]\n image = image.astype('float32')\n h, w = image.shape\n c = 3\n images = np.zeros((h, w, c), dtype=np.uint8)\n for i in range(c):\n images[:, :, i] = image\n images = Image.fromarray(images)\n if self.transform:\n images = self.transform(images)\n labels = torch.from_numpy(np.array([target]))\n return images, labels\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n\n\nclass IVCdataset(Dataset):\n <function token>\n <function token>\n <function token>\n\n\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n<class token>\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n\n def __getitem__(self, index):\n image, target = self.samples[index]\n images = loadImages(image, self.path_raw)\n targets = loadImages(target, self.path_segs)\n tobinary = targets.convert('L')\n targets_mask = tobinary.point(lambda x: 0 if x < 1 else 1, '1')\n if self.train:\n channel_stats = dict(mean=[0.485, 0.456, 0.406], std=[0.229, \n 0.224, 0.225])\n chance = random.random()\n angle = range(-5, 6)\n n_angles = len(angle)\n for i in range(n_angles * 4):\n if i / (n_angles * 4) <= chance < (1 + i) / (n_angles * 4):\n input_transform, target_transform = RotateFlipFlip(angle\n [i % n_angles], i // n_angles, i // n_angles % 2)\n images = input_transform(images)\n targets_mask = target_transform(targets_mask)\n else:\n if self.input_transform:\n images = self.input_transform(images)\n if self.target_transform:\n targets_mask = self.target_transform(targets_mask)\n return images, targets_mask\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n<class token>\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n\n def __len__(self):\n return len(self.samples)\n <function token>\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n<class token>\n<function token>\n\n\nclass Ventricles(Dataset):\n\n def __init__(self, csv_file, path_raw, path_segs, input_transform=None,\n target_transform=None, train=False):\n self.path_raw = path_raw\n self.path_segs = path_segs\n self.input_transform = input_transform\n self.target_transform = target_transform\n self.train = train\n df = pd.read_csv(csv_file, header=None)\n samples = []\n for i in range(len(df)):\n name = df.iloc[i, 0]\n target = df.iloc[i, 1]\n image_name = os.path.join(path_raw, name)\n target_name = os.path.join(path_segs, target)\n image_ni = nibabel.load(image_name).get_data()\n target_ni = nibabel.load(target_name).get_data()\n slices = image_ni.shape[2]\n lower = slices / 4\n upper = slices / 4 * 3\n for i in range(slices):\n name = image_ni[:, :, i]\n target = target_ni[:, :, i]\n item = name, target\n samples.append(item)\n if train and lower < i < upper:\n for _ in range(3):\n samples.append(item)\n self.samples = samples\n <function token>\n <function token>\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n<class token>\n<function token>\n\n\nclass Ventricles(Dataset):\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<function token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<class token>\n<class token>\n<function token>\n<class token>\n"
] | false |
99,012 |
cc66565cd14316413fbb52da70a8478455625c2e
|
from django.db import models
from django.contrib.auth.models import User
class Product(models.Model):
name = models.CharField(max_length=200)
image = models.URLField()
price = models.IntegerField()
description = models.TextField()
class Favorite(models.Model):
user = models.ForeignKey(User)
product = models.ForeignKey(Product)
created = models.DateTimeField(auto_now_add=True)
class Comment(models.Model):
user = models.ForeignKey(User)
product = models.ForeignKey(Product)
content = models.TextField()
created = models.DateTimeField(auto_now_add=True)
|
[
"from django.db import models\nfrom django.contrib.auth.models import User\n\nclass Product(models.Model):\n\tname = models.CharField(max_length=200)\n\timage = models.URLField()\n\tprice = models.IntegerField()\n\tdescription = models.TextField()\n\n\nclass Favorite(models.Model):\n\tuser = models.ForeignKey(User)\n\tproduct = models.ForeignKey(Product)\n\tcreated = models.DateTimeField(auto_now_add=True)\n\nclass Comment(models.Model):\n\tuser = models.ForeignKey(User)\n\tproduct = models.ForeignKey(Product)\n\tcontent = models.TextField()\n\tcreated = models.DateTimeField(auto_now_add=True)",
"from django.db import models\nfrom django.contrib.auth.models import User\n\n\nclass Product(models.Model):\n name = models.CharField(max_length=200)\n image = models.URLField()\n price = models.IntegerField()\n description = models.TextField()\n\n\nclass Favorite(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n created = models.DateTimeField(auto_now_add=True)\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n content = models.TextField()\n created = models.DateTimeField(auto_now_add=True)\n",
"<import token>\n\n\nclass Product(models.Model):\n name = models.CharField(max_length=200)\n image = models.URLField()\n price = models.IntegerField()\n description = models.TextField()\n\n\nclass Favorite(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n created = models.DateTimeField(auto_now_add=True)\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n content = models.TextField()\n created = models.DateTimeField(auto_now_add=True)\n",
"<import token>\n\n\nclass Product(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\nclass Favorite(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n created = models.DateTimeField(auto_now_add=True)\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n content = models.TextField()\n created = models.DateTimeField(auto_now_add=True)\n",
"<import token>\n<class token>\n\n\nclass Favorite(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n created = models.DateTimeField(auto_now_add=True)\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n content = models.TextField()\n created = models.DateTimeField(auto_now_add=True)\n",
"<import token>\n<class token>\n\n\nclass Favorite(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n content = models.TextField()\n created = models.DateTimeField(auto_now_add=True)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass Comment(models.Model):\n user = models.ForeignKey(User)\n product = models.ForeignKey(Product)\n content = models.TextField()\n created = models.DateTimeField(auto_now_add=True)\n",
"<import token>\n<class token>\n<class token>\n\n\nclass Comment(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n"
] | false |
99,013 |
5d227c1c1035966443f2e177d29b1ea8c93de5c5
|
#!/usr/bin/env python
# coding = utf-8
# from socket import *
import socketserver
# from time import ctime
from DongRuanAPI import *
# import threading
import queue
# import multiprocessing
#
myque_senddata = queue.Queue(0)
myque_ansdata = queue.Queue(0)
def senddata(num):
# while True:
# # print ('muity thread num info %d' %num)
# # time.sleep(3)
# if not myque_senddata.empty():
# print("[*] %d thread start work....." % num)
''' case when '''
# data = myque_senddata.get()
data = num
switch_var = {}
switch_var['XXCX'] = ['XXCX_SEND.xml', 'XXCX_ANS.xml']
switch_var['DNCX'] = ['DNCX_SEND.xml', 'DNCX_ANS.xml']
switch_var['DNHD'] = ['DNHD_SEND.xml', 'DNHD_ANS.xml']
switch_var['DNDZ'] = ['DNDZ_SEND.xml', 'DNDZ_ANS.xml']
try:
s_data = DongRuanAPI(data, switch_var[data[0:4].decode('gbk')])
except Exception as e:
s_data = e
# myque_ansdata.put(s_data)
return s_data
class DongRuanTestSerivce(socketserver.BaseRequestHandler):
print('[*] waiting for connection ......')
def handle(self):
# 处理请求信息
while True:
addr = self.client_address
print ('[*] connected from:' , addr)
self.data = self.request.recv(1024)
# print (self.data)
if not self.data:
break
# myque_senddata.put(self.data)
# s_data = myque_ansdata.get()
s_data = senddata(self.data)
self.request.sendall(s_data.encode('gbk'))
print('[*] waiting for connection ......')
if __name__ == '__main__':
# 定义服务器配置
HOST = '192.168.1.58'
PORT = 2424
BUFSIZ = 1024
ADDR = (HOST, PORT)
# # 开启服务器, 持续监听
# tcpSersock = socket(AF_INET, SOCK_STREAM)
# tcpSersock.bind(ADDR)
# tcpSersock.listen(5)
# for num in range(5):
# # multiprocessing.Process(target=startrun.senddata).start()
# threading.Thread(target= senddata, args=(num, )).start()
# # threading.Thread(target=startrun.start).start()
server = socketserver.ThreadingTCPServer(ADDR, DongRuanTestSerivce)
server.serve_forever()
# server = ForkingTCPServer(ADDR, DongRuanTestSerivce)
# server_thread = threading.Thread(target=server.serve_forever)
# server_thread.start()
# startrun = DongRuanTestSerivce()
# lock = multiprocessing.Lock() # 这个一定要定义为全局
|
[
"#!/usr/bin/env python\n# coding = utf-8\n\n# from socket import *\nimport socketserver\n# from time import ctime\nfrom DongRuanAPI import *\n\n# import threading\nimport queue\n# import multiprocessing\n# \nmyque_senddata = queue.Queue(0)\nmyque_ansdata = queue.Queue(0)\n\n\ndef senddata(num):\n # while True:\n # # print ('muity thread num info %d' %num)\n # # time.sleep(3)\n\n # if not myque_senddata.empty():\n\n # print(\"[*] %d thread start work.....\" % num)\n\n\n\n ''' case when '''\n # data = myque_senddata.get()\n data = num\n\n switch_var = {}\n switch_var['XXCX'] = ['XXCX_SEND.xml', 'XXCX_ANS.xml']\n switch_var['DNCX'] = ['DNCX_SEND.xml', 'DNCX_ANS.xml']\n switch_var['DNHD'] = ['DNHD_SEND.xml', 'DNHD_ANS.xml']\n switch_var['DNDZ'] = ['DNDZ_SEND.xml', 'DNDZ_ANS.xml']\n\n try:\n\n s_data = DongRuanAPI(data, switch_var[data[0:4].decode('gbk')])\n\n except Exception as e:\n s_data = e\n\n # myque_ansdata.put(s_data)\n\n return s_data\n\nclass DongRuanTestSerivce(socketserver.BaseRequestHandler):\n\n print('[*] waiting for connection ......')\n\n def handle(self):\n\n # 处理请求信息\n while True:\n\n addr = self.client_address\n print ('[*] connected from:' , addr)\n\n self.data = self.request.recv(1024)\n # print (self.data)\n\n if not self.data:\n break\n\n # myque_senddata.put(self.data)\n # s_data = myque_ansdata.get()\n s_data = senddata(self.data)\n\n self.request.sendall(s_data.encode('gbk'))\n print('[*] waiting for connection ......')\n\nif __name__ == '__main__':\n\n # 定义服务器配置\n HOST = '192.168.1.58'\n PORT = 2424\n BUFSIZ = 1024\n ADDR = (HOST, PORT)\n\n # # 开启服务器, 持续监听\n # tcpSersock = socket(AF_INET, SOCK_STREAM)\n # tcpSersock.bind(ADDR)\n # tcpSersock.listen(5)\n\n # for num in range(5):\n # # multiprocessing.Process(target=startrun.senddata).start()\n # threading.Thread(target= senddata, args=(num, )).start()\n # # threading.Thread(target=startrun.start).start()\n\n server = socketserver.ThreadingTCPServer(ADDR, DongRuanTestSerivce)\n server.serve_forever()\n\n # server = ForkingTCPServer(ADDR, DongRuanTestSerivce)\n # server_thread = threading.Thread(target=server.serve_forever)\n # server_thread.start()\n\n # startrun = DongRuanTestSerivce()\n # lock = multiprocessing.Lock() # 这个一定要定义为全局\n\n",
"import socketserver\nfrom DongRuanAPI import *\nimport queue\nmyque_senddata = queue.Queue(0)\nmyque_ansdata = queue.Queue(0)\n\n\ndef senddata(num):\n \"\"\" case when \"\"\"\n data = num\n switch_var = {}\n switch_var['XXCX'] = ['XXCX_SEND.xml', 'XXCX_ANS.xml']\n switch_var['DNCX'] = ['DNCX_SEND.xml', 'DNCX_ANS.xml']\n switch_var['DNHD'] = ['DNHD_SEND.xml', 'DNHD_ANS.xml']\n switch_var['DNDZ'] = ['DNDZ_SEND.xml', 'DNDZ_ANS.xml']\n try:\n s_data = DongRuanAPI(data, switch_var[data[0:4].decode('gbk')])\n except Exception as e:\n s_data = e\n return s_data\n\n\nclass DongRuanTestSerivce(socketserver.BaseRequestHandler):\n print('[*] waiting for connection ......')\n\n def handle(self):\n while True:\n addr = self.client_address\n print('[*] connected from:', addr)\n self.data = self.request.recv(1024)\n if not self.data:\n break\n s_data = senddata(self.data)\n self.request.sendall(s_data.encode('gbk'))\n print('[*] waiting for connection ......')\n\n\nif __name__ == '__main__':\n HOST = '192.168.1.58'\n PORT = 2424\n BUFSIZ = 1024\n ADDR = HOST, PORT\n server = socketserver.ThreadingTCPServer(ADDR, DongRuanTestSerivce)\n server.serve_forever()\n",
"<import token>\nmyque_senddata = queue.Queue(0)\nmyque_ansdata = queue.Queue(0)\n\n\ndef senddata(num):\n \"\"\" case when \"\"\"\n data = num\n switch_var = {}\n switch_var['XXCX'] = ['XXCX_SEND.xml', 'XXCX_ANS.xml']\n switch_var['DNCX'] = ['DNCX_SEND.xml', 'DNCX_ANS.xml']\n switch_var['DNHD'] = ['DNHD_SEND.xml', 'DNHD_ANS.xml']\n switch_var['DNDZ'] = ['DNDZ_SEND.xml', 'DNDZ_ANS.xml']\n try:\n s_data = DongRuanAPI(data, switch_var[data[0:4].decode('gbk')])\n except Exception as e:\n s_data = e\n return s_data\n\n\nclass DongRuanTestSerivce(socketserver.BaseRequestHandler):\n print('[*] waiting for connection ......')\n\n def handle(self):\n while True:\n addr = self.client_address\n print('[*] connected from:', addr)\n self.data = self.request.recv(1024)\n if not self.data:\n break\n s_data = senddata(self.data)\n self.request.sendall(s_data.encode('gbk'))\n print('[*] waiting for connection ......')\n\n\nif __name__ == '__main__':\n HOST = '192.168.1.58'\n PORT = 2424\n BUFSIZ = 1024\n ADDR = HOST, PORT\n server = socketserver.ThreadingTCPServer(ADDR, DongRuanTestSerivce)\n server.serve_forever()\n",
"<import token>\n<assignment token>\n\n\ndef senddata(num):\n \"\"\" case when \"\"\"\n data = num\n switch_var = {}\n switch_var['XXCX'] = ['XXCX_SEND.xml', 'XXCX_ANS.xml']\n switch_var['DNCX'] = ['DNCX_SEND.xml', 'DNCX_ANS.xml']\n switch_var['DNHD'] = ['DNHD_SEND.xml', 'DNHD_ANS.xml']\n switch_var['DNDZ'] = ['DNDZ_SEND.xml', 'DNDZ_ANS.xml']\n try:\n s_data = DongRuanAPI(data, switch_var[data[0:4].decode('gbk')])\n except Exception as e:\n s_data = e\n return s_data\n\n\nclass DongRuanTestSerivce(socketserver.BaseRequestHandler):\n print('[*] waiting for connection ......')\n\n def handle(self):\n while True:\n addr = self.client_address\n print('[*] connected from:', addr)\n self.data = self.request.recv(1024)\n if not self.data:\n break\n s_data = senddata(self.data)\n self.request.sendall(s_data.encode('gbk'))\n print('[*] waiting for connection ......')\n\n\nif __name__ == '__main__':\n HOST = '192.168.1.58'\n PORT = 2424\n BUFSIZ = 1024\n ADDR = HOST, PORT\n server = socketserver.ThreadingTCPServer(ADDR, DongRuanTestSerivce)\n server.serve_forever()\n",
"<import token>\n<assignment token>\n\n\ndef senddata(num):\n \"\"\" case when \"\"\"\n data = num\n switch_var = {}\n switch_var['XXCX'] = ['XXCX_SEND.xml', 'XXCX_ANS.xml']\n switch_var['DNCX'] = ['DNCX_SEND.xml', 'DNCX_ANS.xml']\n switch_var['DNHD'] = ['DNHD_SEND.xml', 'DNHD_ANS.xml']\n switch_var['DNDZ'] = ['DNDZ_SEND.xml', 'DNDZ_ANS.xml']\n try:\n s_data = DongRuanAPI(data, switch_var[data[0:4].decode('gbk')])\n except Exception as e:\n s_data = e\n return s_data\n\n\nclass DongRuanTestSerivce(socketserver.BaseRequestHandler):\n print('[*] waiting for connection ......')\n\n def handle(self):\n while True:\n addr = self.client_address\n print('[*] connected from:', addr)\n self.data = self.request.recv(1024)\n if not self.data:\n break\n s_data = senddata(self.data)\n self.request.sendall(s_data.encode('gbk'))\n print('[*] waiting for connection ......')\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\nclass DongRuanTestSerivce(socketserver.BaseRequestHandler):\n print('[*] waiting for connection ......')\n\n def handle(self):\n while True:\n addr = self.client_address\n print('[*] connected from:', addr)\n self.data = self.request.recv(1024)\n if not self.data:\n break\n s_data = senddata(self.data)\n self.request.sendall(s_data.encode('gbk'))\n print('[*] waiting for connection ......')\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n\n\nclass DongRuanTestSerivce(socketserver.BaseRequestHandler):\n print('[*] waiting for connection ......')\n <function token>\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<class token>\n<code token>\n"
] | false |
99,014 |
7e8c4e3e0cdbf22b9d554c4e874ec7c0d57d1116
|
from scipy.interpolate import lagrange
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from sympy.solvers import nsolve
from sympy import Symbol
import matplotlib as mpl
def inside_obstacle(point, obstacle):
"""
returns 1 if the point is inside any obstacles
0 otherwise
"""
for obs in obstacle:
if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:
return 1
return 0
def through_obstacle(line, obstacles):
"""
returns 1 if the line goes through any obstacles
0 otherwise
"""
noofpoints = 100
for i in range(noofpoints):
if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:
return 1
return 0
xlimits=(-2,12)
ylimits=(-5,5)
start=(0,0)
goal=(10,0)
obstacles=[[(3.5,4.5,4.5,3.5),(0.5,0.5,1.5,1.5)],
[(6.5,7.5,7.5,6.5),(-1.5,-1.5,-0.5,-0.5)]]
# xlimits=(-2.,15.)
# ylimits=(-2.,15.)
# start=[0,0]
# goal=[10,10]
# obstacles=[[(1,2,2,1),(1,1,5,5)],
# [(3,4,4,3),(4,4,12,12)],
# [(3,12,12,3),(12,12,13,13)],
# [(12,13,13,12),(5,5,13,13)],
# [(6,12,12,6),(5,5,6,6)]]
# xlimits = (-6, 36)
# ylimits = (-6, 6)
# obstacles = [[(-6, 25, 25, -6), (-6, -6, -5, -5)],
# [(-6, 30, 30, -6), (5, 5, 6, 6)],
# [(-6, -5, -5, -6), (-5, -5, 5, 5)],
# [(4, 5, 5, 4), (-5, -5, 1, 1)],
# [(9, 10, 10, 9), (0, 0, 5, 5)],
# [(14, 15, 15, 14), (-5, -5, 1, 1)],
# [(19, 20, 20, 19), (0, 0, 5, 5)],
# [(24, 25, 25, 24), (-5, -5, 1, 1)],
# [(29, 30, 30, 29), (0, 0, 5, 5)]]
# start = [0, 0]
# goal = [35, 0]
pathi = []
pathj = []
with open('IFS.txt', 'r') as f:
for line in f:
for ele in range(len(line)):
if line[ele] == '\t':
br = ele
break
pathi.append(float(line[0:br]))
pathj.append(float(line[br+1:-2]))
finalpath = list(np.transpose(np.vstack((pathi, pathj))))
f = []
for i in range(len(finalpath)):
f.append(list(finalpath[i]))
finalpath=f
newfinalpath = []
newfinalpath.append(finalpath[0])
while str(newfinalpath[-1]) != str(finalpath[-1]):
print(newfinalpath[-1])
indx = finalpath.index(newfinalpath[-1])
for i in range(indx, len(finalpath)):
if i == len(finalpath)-1:
newfinalpath.append(finalpath[-1])
break
if through_obstacle((finalpath[indx][0], finalpath[indx][1], finalpath[i][0], finalpath[i][1]), obstacles) == 1:
newfinalpath.append(finalpath[i-1])
break
newfinalpath = np.transpose(newfinalpath)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(*newfinalpath,color='orange')
for obs in obstacles:
ax.fill(*obs, 'k', alpha=1)
plt.xlim(*xlimits)
plt.ylim(*ylimits)
pathi=newfinalpath[0]
pathj=newfinalpath[1]
a=Symbol('a')
b=Symbol('b')
c=Symbol('c')
d=Symbol('d')
e=Symbol('e')
totalx=[]
totaly=[]
x=np.linspace(pathi[0],(pathi[0]+pathi[1])/2,20)
y=np.linspace(pathj[0],(pathj[0]+pathj[1])/2,20)
for points in range(len(x)):
totalx.append(x[points])
totaly.append(y[points])
for i in range(int(len(pathi)-2)):
f1=a*((pathi[i]+pathi[i+1])/2)**4+b*((pathi[i]+pathi[i+1])/2)**3+c*((pathi[i]+pathi[i+1])/2)**2+d*((pathi[i]+pathi[i+1])/2)**1+e-(pathj[i]+pathj[i+1])/2
f2=a*((pathi[i+1]))**4+b*((pathi[i+1]))**3+c*((pathi[i+1]))**2+d*((pathi[i+1]))**1+e-(pathj[i+1])
f3=a*((pathi[i+1]+pathi[i+2])/2)**4+b*((pathi[i+1]+pathi[i+2])/2)**3+c*((pathi[i+1]+pathi[i+2])/2)**2+d*((pathi[i+1]+pathi[i+2])/2)**1+e-(pathj[i+1]+pathj[i+2])/2
f4=4*a*((pathi[i]+pathi[i+1])/2)**3+3*b*((pathi[i]+pathi[i+1])/2)**2+2*c*((pathi[i]+pathi[i+1])/2)**1+d-((pathj[i+1]-pathj[i])/(pathi[i+1]-pathi[i]))
f5=4*a*((pathi[i+1]+pathi[i+2])/2)**3+3*b*((pathi[i+1]+pathi[i+2])/2)**2+2*c*((pathi[i+1]+pathi[i+2])/2)**1+d-((pathj[i+2]-pathj[i+1])/(pathi[i+2]-pathi[i+1]))
variables=nsolve((f1,f2,f3,f4,f5),(a,b,c,d,e),(0,0,0,0,0))
x=list(np.linspace((pathi[i]+pathi[i+1])/2,(pathi[i+1]+pathi[i+2])/2,20))
for points in range(len(x)):
totalx.append(x[points])
totaly.append(variables[0]*x[points]**4+variables[1]*x[points]**3+variables[2]*x[points]**2+variables[3]*x[points]**1+variables[4])
x=np.linspace((pathi[-1]+pathi[-2])/2,(pathi[-1]),20)
y=np.linspace((pathj[-1]+pathj[-2])/2,(pathj[-1]),20)
for points in range(len(x)):
totalx.append(x[points])
totaly.append(y[points])
ax.plot(totalx,totaly,color='blue')
# totalx.append(x)
# totaly.append(y)
# plt.plot([(pathi[-1]+pathi[-2])/2,pathi[-1]],[(pathj[-1]+pathj[-2])/2,pathj[-1]],color='blue')
totalxdash=np.gradient(totalx)
totalydash=np.gradient(totaly)
totalydashdash=np.gradient(totalydash)
R=[]
for radius in range(len(totaly)):
R.append(1/((totalydashdash[radius])*(1+(totalydash[radius])**2)**1.5))
# print(R)
# print('R=',R)
l=1.5
theta=[]
for radii in R:
theta.append(round(np.degrees(np.arctan(l/radii)),4))
print('x=',totalx)
print('theta=',theta)
plt.plot(start[0],start[1], 'o',color='red')
plt.plot(goal[0],goal[1], 'o',color='green')
plt.legend(["Eliminate Redundant Nodes","Smooth Curve","Start","Goal"])
plt.show()
|
[
"from scipy.interpolate import lagrange\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom matplotlib.patches import Rectangle\r\nfrom sympy.solvers import nsolve\r\nfrom sympy import Symbol\r\nimport matplotlib as mpl\r\n\r\n\r\ndef inside_obstacle(point, obstacle):\r\n \"\"\"\r\n returns 1 if the point is inside any obstacles\r\n 0 otherwise\r\n \"\"\"\r\n for obs in obstacle:\r\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][0] and point[1] < obs[1][2]:\r\n return 1\r\n return 0\r\n\r\n\r\ndef through_obstacle(line, obstacles):\r\n \"\"\"\r\n returns 1 if the line goes through any obstacles\r\n 0 otherwise\r\n \"\"\"\r\n noofpoints = 100\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0\r\n\r\nxlimits=(-2,12)\r\nylimits=(-5,5)\r\nstart=(0,0)\r\ngoal=(10,0)\r\nobstacles=[[(3.5,4.5,4.5,3.5),(0.5,0.5,1.5,1.5)],\r\n [(6.5,7.5,7.5,6.5),(-1.5,-1.5,-0.5,-0.5)]]\r\n\r\n# xlimits=(-2.,15.)\r\n# ylimits=(-2.,15.)\r\n# start=[0,0]\r\n# goal=[10,10]\r\n# obstacles=[[(1,2,2,1),(1,1,5,5)],\r\n# [(3,4,4,3),(4,4,12,12)],\r\n# [(3,12,12,3),(12,12,13,13)],\r\n# [(12,13,13,12),(5,5,13,13)],\r\n# [(6,12,12,6),(5,5,6,6)]]\r\n\r\n# xlimits = (-6, 36)\r\n# ylimits = (-6, 6)\r\n# obstacles = [[(-6, 25, 25, -6), (-6, -6, -5, -5)],\r\n# [(-6, 30, 30, -6), (5, 5, 6, 6)],\r\n# [(-6, -5, -5, -6), (-5, -5, 5, 5)],\r\n# [(4, 5, 5, 4), (-5, -5, 1, 1)],\r\n# [(9, 10, 10, 9), (0, 0, 5, 5)],\r\n# [(14, 15, 15, 14), (-5, -5, 1, 1)],\r\n# [(19, 20, 20, 19), (0, 0, 5, 5)],\r\n# [(24, 25, 25, 24), (-5, -5, 1, 1)],\r\n# [(29, 30, 30, 29), (0, 0, 5, 5)]]\r\n# start = [0, 0]\r\n# goal = [35, 0]\r\n\r\npathi = []\r\npathj = []\r\nwith open('IFS.txt', 'r') as f:\r\n for line in f:\r\n for ele in range(len(line)):\r\n if line[ele] == '\\t':\r\n br = ele\r\n break\r\n pathi.append(float(line[0:br]))\r\n pathj.append(float(line[br+1:-2]))\r\n\r\nfinalpath = list(np.transpose(np.vstack((pathi, pathj))))\r\n\r\nf = []\r\n\r\nfor i in range(len(finalpath)):\r\n f.append(list(finalpath[i]))\r\n\r\nfinalpath=f\r\n\r\nnewfinalpath = []\r\n\r\nnewfinalpath.append(finalpath[0])\r\n\r\nwhile str(newfinalpath[-1]) != str(finalpath[-1]):\r\n\r\n print(newfinalpath[-1])\r\n indx = finalpath.index(newfinalpath[-1])\r\n\r\n for i in range(indx, len(finalpath)):\r\n if i == len(finalpath)-1:\r\n newfinalpath.append(finalpath[-1])\r\n break\r\n if through_obstacle((finalpath[indx][0], finalpath[indx][1], finalpath[i][0], finalpath[i][1]), obstacles) == 1:\r\n newfinalpath.append(finalpath[i-1])\r\n break\r\n\r\nnewfinalpath = np.transpose(newfinalpath)\r\n\r\nfig = plt.figure() \r\n \r\nax = fig.add_subplot(111) \r\n\r\nax.plot(*newfinalpath,color='orange')\r\n\r\nfor obs in obstacles:\r\n ax.fill(*obs, 'k', alpha=1)\r\nplt.xlim(*xlimits)\r\nplt.ylim(*ylimits)\r\n\r\npathi=newfinalpath[0]\r\npathj=newfinalpath[1]\r\n\r\na=Symbol('a')\r\nb=Symbol('b')\r\nc=Symbol('c')\r\nd=Symbol('d')\r\ne=Symbol('e')\r\n\r\ntotalx=[]\r\ntotaly=[]\r\n\r\nx=np.linspace(pathi[0],(pathi[0]+pathi[1])/2,20)\r\ny=np.linspace(pathj[0],(pathj[0]+pathj[1])/2,20)\r\n\r\nfor points in range(len(x)):\r\n totalx.append(x[points])\r\n totaly.append(y[points])\r\n\r\nfor i in range(int(len(pathi)-2)):\r\n f1=a*((pathi[i]+pathi[i+1])/2)**4+b*((pathi[i]+pathi[i+1])/2)**3+c*((pathi[i]+pathi[i+1])/2)**2+d*((pathi[i]+pathi[i+1])/2)**1+e-(pathj[i]+pathj[i+1])/2\r\n f2=a*((pathi[i+1]))**4+b*((pathi[i+1]))**3+c*((pathi[i+1]))**2+d*((pathi[i+1]))**1+e-(pathj[i+1])\r\n f3=a*((pathi[i+1]+pathi[i+2])/2)**4+b*((pathi[i+1]+pathi[i+2])/2)**3+c*((pathi[i+1]+pathi[i+2])/2)**2+d*((pathi[i+1]+pathi[i+2])/2)**1+e-(pathj[i+1]+pathj[i+2])/2\r\n\r\n f4=4*a*((pathi[i]+pathi[i+1])/2)**3+3*b*((pathi[i]+pathi[i+1])/2)**2+2*c*((pathi[i]+pathi[i+1])/2)**1+d-((pathj[i+1]-pathj[i])/(pathi[i+1]-pathi[i]))\r\n f5=4*a*((pathi[i+1]+pathi[i+2])/2)**3+3*b*((pathi[i+1]+pathi[i+2])/2)**2+2*c*((pathi[i+1]+pathi[i+2])/2)**1+d-((pathj[i+2]-pathj[i+1])/(pathi[i+2]-pathi[i+1]))\r\n\r\n variables=nsolve((f1,f2,f3,f4,f5),(a,b,c,d,e),(0,0,0,0,0))\r\n\r\n x=list(np.linspace((pathi[i]+pathi[i+1])/2,(pathi[i+1]+pathi[i+2])/2,20))\r\n\r\n for points in range(len(x)):\r\n totalx.append(x[points])\r\n totaly.append(variables[0]*x[points]**4+variables[1]*x[points]**3+variables[2]*x[points]**2+variables[3]*x[points]**1+variables[4])\r\n\r\nx=np.linspace((pathi[-1]+pathi[-2])/2,(pathi[-1]),20)\r\ny=np.linspace((pathj[-1]+pathj[-2])/2,(pathj[-1]),20)\r\n\r\nfor points in range(len(x)):\r\n totalx.append(x[points])\r\n totaly.append(y[points])\r\n\r\nax.plot(totalx,totaly,color='blue')\r\n# totalx.append(x)\r\n# totaly.append(y)\r\n\r\n# plt.plot([(pathi[-1]+pathi[-2])/2,pathi[-1]],[(pathj[-1]+pathj[-2])/2,pathj[-1]],color='blue')\r\ntotalxdash=np.gradient(totalx)\r\ntotalydash=np.gradient(totaly)\r\ntotalydashdash=np.gradient(totalydash)\r\n\r\nR=[]\r\nfor radius in range(len(totaly)):\r\n R.append(1/((totalydashdash[radius])*(1+(totalydash[radius])**2)**1.5))\r\n # print(R)\r\n\r\n# print('R=',R)\r\n\r\nl=1.5\r\ntheta=[]\r\nfor radii in R:\r\n theta.append(round(np.degrees(np.arctan(l/radii)),4))\r\n\r\nprint('x=',totalx)\r\nprint('theta=',theta)\r\n\r\nplt.plot(start[0],start[1], 'o',color='red')\r\nplt.plot(goal[0],goal[1], 'o',color='green')\r\n\r\nplt.legend([\"Eliminate Redundant Nodes\",\"Smooth Curve\",\"Start\",\"Goal\"])\r\n\r\nplt.show()\r\n",
"from scipy.interpolate import lagrange\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib.patches import Rectangle\nfrom sympy.solvers import nsolve\nfrom sympy import Symbol\nimport matplotlib as mpl\n\n\ndef inside_obstacle(point, obstacle):\n \"\"\"\n returns 1 if the point is inside any obstacles\n 0 otherwise\n \"\"\"\n for obs in obstacle:\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][\n 0] and point[1] < obs[1][2]:\n return 1\n return 0\n\n\ndef through_obstacle(line, obstacles):\n \"\"\"\n returns 1 if the line goes through any obstacles\n 0 otherwise\n \"\"\"\n noofpoints = 100\n for i in range(noofpoints):\n if inside_obstacle((line[0] + i * (line[2] - line[0]) / noofpoints,\n line[1] + i * (line[3] - line[1]) / noofpoints), obstacles) == 1:\n return 1\n return 0\n\n\nxlimits = -2, 12\nylimits = -5, 5\nstart = 0, 0\ngoal = 10, 0\nobstacles = [[(3.5, 4.5, 4.5, 3.5), (0.5, 0.5, 1.5, 1.5)], [(6.5, 7.5, 7.5,\n 6.5), (-1.5, -1.5, -0.5, -0.5)]]\npathi = []\npathj = []\nwith open('IFS.txt', 'r') as f:\n for line in f:\n for ele in range(len(line)):\n if line[ele] == '\\t':\n br = ele\n break\n pathi.append(float(line[0:br]))\n pathj.append(float(line[br + 1:-2]))\nfinalpath = list(np.transpose(np.vstack((pathi, pathj))))\nf = []\nfor i in range(len(finalpath)):\n f.append(list(finalpath[i]))\nfinalpath = f\nnewfinalpath = []\nnewfinalpath.append(finalpath[0])\nwhile str(newfinalpath[-1]) != str(finalpath[-1]):\n print(newfinalpath[-1])\n indx = finalpath.index(newfinalpath[-1])\n for i in range(indx, len(finalpath)):\n if i == len(finalpath) - 1:\n newfinalpath.append(finalpath[-1])\n break\n if through_obstacle((finalpath[indx][0], finalpath[indx][1],\n finalpath[i][0], finalpath[i][1]), obstacles) == 1:\n newfinalpath.append(finalpath[i - 1])\n break\nnewfinalpath = np.transpose(newfinalpath)\nfig = plt.figure()\nax = fig.add_subplot(111)\nax.plot(*newfinalpath, color='orange')\nfor obs in obstacles:\n ax.fill(*obs, 'k', alpha=1)\nplt.xlim(*xlimits)\nplt.ylim(*ylimits)\npathi = newfinalpath[0]\npathj = newfinalpath[1]\na = Symbol('a')\nb = Symbol('b')\nc = Symbol('c')\nd = Symbol('d')\ne = Symbol('e')\ntotalx = []\ntotaly = []\nx = np.linspace(pathi[0], (pathi[0] + pathi[1]) / 2, 20)\ny = np.linspace(pathj[0], (pathj[0] + pathj[1]) / 2, 20)\nfor points in range(len(x)):\n totalx.append(x[points])\n totaly.append(y[points])\nfor i in range(int(len(pathi) - 2)):\n f1 = a * ((pathi[i] + pathi[i + 1]) / 2) ** 4 + b * ((pathi[i] + pathi[\n i + 1]) / 2) ** 3 + c * ((pathi[i] + pathi[i + 1]) / 2) ** 2 + d * ((\n pathi[i] + pathi[i + 1]) / 2) ** 1 + e - (pathj[i] + pathj[i + 1]) / 2\n f2 = a * pathi[i + 1] ** 4 + b * pathi[i + 1] ** 3 + c * pathi[i + 1\n ] ** 2 + d * pathi[i + 1] ** 1 + e - pathj[i + 1]\n f3 = a * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 4 + b * ((pathi[i + 1] +\n pathi[i + 2]) / 2) ** 3 + c * ((pathi[i + 1] + pathi[i + 2]) / 2\n ) ** 2 + d * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 1 + e - (pathj[\n i + 1] + pathj[i + 2]) / 2\n f4 = 4 * a * ((pathi[i] + pathi[i + 1]) / 2) ** 3 + 3 * b * ((pathi[i] +\n pathi[i + 1]) / 2) ** 2 + 2 * c * ((pathi[i] + pathi[i + 1]) / 2\n ) ** 1 + d - (pathj[i + 1] - pathj[i]) / (pathi[i + 1] - pathi[i])\n f5 = 4 * a * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 3 + 3 * b * ((pathi\n [i + 1] + pathi[i + 2]) / 2) ** 2 + 2 * c * ((pathi[i + 1] + pathi[\n i + 2]) / 2) ** 1 + d - (pathj[i + 2] - pathj[i + 1]) / (pathi[i + \n 2] - pathi[i + 1])\n variables = nsolve((f1, f2, f3, f4, f5), (a, b, c, d, e), (0, 0, 0, 0, 0))\n x = list(np.linspace((pathi[i] + pathi[i + 1]) / 2, (pathi[i + 1] +\n pathi[i + 2]) / 2, 20))\n for points in range(len(x)):\n totalx.append(x[points])\n totaly.append(variables[0] * x[points] ** 4 + variables[1] * x[\n points] ** 3 + variables[2] * x[points] ** 2 + variables[3] * x\n [points] ** 1 + variables[4])\nx = np.linspace((pathi[-1] + pathi[-2]) / 2, pathi[-1], 20)\ny = np.linspace((pathj[-1] + pathj[-2]) / 2, pathj[-1], 20)\nfor points in range(len(x)):\n totalx.append(x[points])\n totaly.append(y[points])\nax.plot(totalx, totaly, color='blue')\ntotalxdash = np.gradient(totalx)\ntotalydash = np.gradient(totaly)\ntotalydashdash = np.gradient(totalydash)\nR = []\nfor radius in range(len(totaly)):\n R.append(1 / (totalydashdash[radius] * (1 + totalydash[radius] ** 2) **\n 1.5))\nl = 1.5\ntheta = []\nfor radii in R:\n theta.append(round(np.degrees(np.arctan(l / radii)), 4))\nprint('x=', totalx)\nprint('theta=', theta)\nplt.plot(start[0], start[1], 'o', color='red')\nplt.plot(goal[0], goal[1], 'o', color='green')\nplt.legend(['Eliminate Redundant Nodes', 'Smooth Curve', 'Start', 'Goal'])\nplt.show()\n",
"<import token>\n\n\ndef inside_obstacle(point, obstacle):\n \"\"\"\n returns 1 if the point is inside any obstacles\n 0 otherwise\n \"\"\"\n for obs in obstacle:\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][\n 0] and point[1] < obs[1][2]:\n return 1\n return 0\n\n\ndef through_obstacle(line, obstacles):\n \"\"\"\n returns 1 if the line goes through any obstacles\n 0 otherwise\n \"\"\"\n noofpoints = 100\n for i in range(noofpoints):\n if inside_obstacle((line[0] + i * (line[2] - line[0]) / noofpoints,\n line[1] + i * (line[3] - line[1]) / noofpoints), obstacles) == 1:\n return 1\n return 0\n\n\nxlimits = -2, 12\nylimits = -5, 5\nstart = 0, 0\ngoal = 10, 0\nobstacles = [[(3.5, 4.5, 4.5, 3.5), (0.5, 0.5, 1.5, 1.5)], [(6.5, 7.5, 7.5,\n 6.5), (-1.5, -1.5, -0.5, -0.5)]]\npathi = []\npathj = []\nwith open('IFS.txt', 'r') as f:\n for line in f:\n for ele in range(len(line)):\n if line[ele] == '\\t':\n br = ele\n break\n pathi.append(float(line[0:br]))\n pathj.append(float(line[br + 1:-2]))\nfinalpath = list(np.transpose(np.vstack((pathi, pathj))))\nf = []\nfor i in range(len(finalpath)):\n f.append(list(finalpath[i]))\nfinalpath = f\nnewfinalpath = []\nnewfinalpath.append(finalpath[0])\nwhile str(newfinalpath[-1]) != str(finalpath[-1]):\n print(newfinalpath[-1])\n indx = finalpath.index(newfinalpath[-1])\n for i in range(indx, len(finalpath)):\n if i == len(finalpath) - 1:\n newfinalpath.append(finalpath[-1])\n break\n if through_obstacle((finalpath[indx][0], finalpath[indx][1],\n finalpath[i][0], finalpath[i][1]), obstacles) == 1:\n newfinalpath.append(finalpath[i - 1])\n break\nnewfinalpath = np.transpose(newfinalpath)\nfig = plt.figure()\nax = fig.add_subplot(111)\nax.plot(*newfinalpath, color='orange')\nfor obs in obstacles:\n ax.fill(*obs, 'k', alpha=1)\nplt.xlim(*xlimits)\nplt.ylim(*ylimits)\npathi = newfinalpath[0]\npathj = newfinalpath[1]\na = Symbol('a')\nb = Symbol('b')\nc = Symbol('c')\nd = Symbol('d')\ne = Symbol('e')\ntotalx = []\ntotaly = []\nx = np.linspace(pathi[0], (pathi[0] + pathi[1]) / 2, 20)\ny = np.linspace(pathj[0], (pathj[0] + pathj[1]) / 2, 20)\nfor points in range(len(x)):\n totalx.append(x[points])\n totaly.append(y[points])\nfor i in range(int(len(pathi) - 2)):\n f1 = a * ((pathi[i] + pathi[i + 1]) / 2) ** 4 + b * ((pathi[i] + pathi[\n i + 1]) / 2) ** 3 + c * ((pathi[i] + pathi[i + 1]) / 2) ** 2 + d * ((\n pathi[i] + pathi[i + 1]) / 2) ** 1 + e - (pathj[i] + pathj[i + 1]) / 2\n f2 = a * pathi[i + 1] ** 4 + b * pathi[i + 1] ** 3 + c * pathi[i + 1\n ] ** 2 + d * pathi[i + 1] ** 1 + e - pathj[i + 1]\n f3 = a * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 4 + b * ((pathi[i + 1] +\n pathi[i + 2]) / 2) ** 3 + c * ((pathi[i + 1] + pathi[i + 2]) / 2\n ) ** 2 + d * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 1 + e - (pathj[\n i + 1] + pathj[i + 2]) / 2\n f4 = 4 * a * ((pathi[i] + pathi[i + 1]) / 2) ** 3 + 3 * b * ((pathi[i] +\n pathi[i + 1]) / 2) ** 2 + 2 * c * ((pathi[i] + pathi[i + 1]) / 2\n ) ** 1 + d - (pathj[i + 1] - pathj[i]) / (pathi[i + 1] - pathi[i])\n f5 = 4 * a * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 3 + 3 * b * ((pathi\n [i + 1] + pathi[i + 2]) / 2) ** 2 + 2 * c * ((pathi[i + 1] + pathi[\n i + 2]) / 2) ** 1 + d - (pathj[i + 2] - pathj[i + 1]) / (pathi[i + \n 2] - pathi[i + 1])\n variables = nsolve((f1, f2, f3, f4, f5), (a, b, c, d, e), (0, 0, 0, 0, 0))\n x = list(np.linspace((pathi[i] + pathi[i + 1]) / 2, (pathi[i + 1] +\n pathi[i + 2]) / 2, 20))\n for points in range(len(x)):\n totalx.append(x[points])\n totaly.append(variables[0] * x[points] ** 4 + variables[1] * x[\n points] ** 3 + variables[2] * x[points] ** 2 + variables[3] * x\n [points] ** 1 + variables[4])\nx = np.linspace((pathi[-1] + pathi[-2]) / 2, pathi[-1], 20)\ny = np.linspace((pathj[-1] + pathj[-2]) / 2, pathj[-1], 20)\nfor points in range(len(x)):\n totalx.append(x[points])\n totaly.append(y[points])\nax.plot(totalx, totaly, color='blue')\ntotalxdash = np.gradient(totalx)\ntotalydash = np.gradient(totaly)\ntotalydashdash = np.gradient(totalydash)\nR = []\nfor radius in range(len(totaly)):\n R.append(1 / (totalydashdash[radius] * (1 + totalydash[radius] ** 2) **\n 1.5))\nl = 1.5\ntheta = []\nfor radii in R:\n theta.append(round(np.degrees(np.arctan(l / radii)), 4))\nprint('x=', totalx)\nprint('theta=', theta)\nplt.plot(start[0], start[1], 'o', color='red')\nplt.plot(goal[0], goal[1], 'o', color='green')\nplt.legend(['Eliminate Redundant Nodes', 'Smooth Curve', 'Start', 'Goal'])\nplt.show()\n",
"<import token>\n\n\ndef inside_obstacle(point, obstacle):\n \"\"\"\n returns 1 if the point is inside any obstacles\n 0 otherwise\n \"\"\"\n for obs in obstacle:\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][\n 0] and point[1] < obs[1][2]:\n return 1\n return 0\n\n\ndef through_obstacle(line, obstacles):\n \"\"\"\n returns 1 if the line goes through any obstacles\n 0 otherwise\n \"\"\"\n noofpoints = 100\n for i in range(noofpoints):\n if inside_obstacle((line[0] + i * (line[2] - line[0]) / noofpoints,\n line[1] + i * (line[3] - line[1]) / noofpoints), obstacles) == 1:\n return 1\n return 0\n\n\n<assignment token>\nwith open('IFS.txt', 'r') as f:\n for line in f:\n for ele in range(len(line)):\n if line[ele] == '\\t':\n br = ele\n break\n pathi.append(float(line[0:br]))\n pathj.append(float(line[br + 1:-2]))\n<assignment token>\nfor i in range(len(finalpath)):\n f.append(list(finalpath[i]))\n<assignment token>\nnewfinalpath.append(finalpath[0])\nwhile str(newfinalpath[-1]) != str(finalpath[-1]):\n print(newfinalpath[-1])\n indx = finalpath.index(newfinalpath[-1])\n for i in range(indx, len(finalpath)):\n if i == len(finalpath) - 1:\n newfinalpath.append(finalpath[-1])\n break\n if through_obstacle((finalpath[indx][0], finalpath[indx][1],\n finalpath[i][0], finalpath[i][1]), obstacles) == 1:\n newfinalpath.append(finalpath[i - 1])\n break\n<assignment token>\nax.plot(*newfinalpath, color='orange')\nfor obs in obstacles:\n ax.fill(*obs, 'k', alpha=1)\nplt.xlim(*xlimits)\nplt.ylim(*ylimits)\n<assignment token>\nfor points in range(len(x)):\n totalx.append(x[points])\n totaly.append(y[points])\nfor i in range(int(len(pathi) - 2)):\n f1 = a * ((pathi[i] + pathi[i + 1]) / 2) ** 4 + b * ((pathi[i] + pathi[\n i + 1]) / 2) ** 3 + c * ((pathi[i] + pathi[i + 1]) / 2) ** 2 + d * ((\n pathi[i] + pathi[i + 1]) / 2) ** 1 + e - (pathj[i] + pathj[i + 1]) / 2\n f2 = a * pathi[i + 1] ** 4 + b * pathi[i + 1] ** 3 + c * pathi[i + 1\n ] ** 2 + d * pathi[i + 1] ** 1 + e - pathj[i + 1]\n f3 = a * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 4 + b * ((pathi[i + 1] +\n pathi[i + 2]) / 2) ** 3 + c * ((pathi[i + 1] + pathi[i + 2]) / 2\n ) ** 2 + d * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 1 + e - (pathj[\n i + 1] + pathj[i + 2]) / 2\n f4 = 4 * a * ((pathi[i] + pathi[i + 1]) / 2) ** 3 + 3 * b * ((pathi[i] +\n pathi[i + 1]) / 2) ** 2 + 2 * c * ((pathi[i] + pathi[i + 1]) / 2\n ) ** 1 + d - (pathj[i + 1] - pathj[i]) / (pathi[i + 1] - pathi[i])\n f5 = 4 * a * ((pathi[i + 1] + pathi[i + 2]) / 2) ** 3 + 3 * b * ((pathi\n [i + 1] + pathi[i + 2]) / 2) ** 2 + 2 * c * ((pathi[i + 1] + pathi[\n i + 2]) / 2) ** 1 + d - (pathj[i + 2] - pathj[i + 1]) / (pathi[i + \n 2] - pathi[i + 1])\n variables = nsolve((f1, f2, f3, f4, f5), (a, b, c, d, e), (0, 0, 0, 0, 0))\n x = list(np.linspace((pathi[i] + pathi[i + 1]) / 2, (pathi[i + 1] +\n pathi[i + 2]) / 2, 20))\n for points in range(len(x)):\n totalx.append(x[points])\n totaly.append(variables[0] * x[points] ** 4 + variables[1] * x[\n points] ** 3 + variables[2] * x[points] ** 2 + variables[3] * x\n [points] ** 1 + variables[4])\n<assignment token>\nfor points in range(len(x)):\n totalx.append(x[points])\n totaly.append(y[points])\nax.plot(totalx, totaly, color='blue')\n<assignment token>\nfor radius in range(len(totaly)):\n R.append(1 / (totalydashdash[radius] * (1 + totalydash[radius] ** 2) **\n 1.5))\n<assignment token>\nfor radii in R:\n theta.append(round(np.degrees(np.arctan(l / radii)), 4))\nprint('x=', totalx)\nprint('theta=', theta)\nplt.plot(start[0], start[1], 'o', color='red')\nplt.plot(goal[0], goal[1], 'o', color='green')\nplt.legend(['Eliminate Redundant Nodes', 'Smooth Curve', 'Start', 'Goal'])\nplt.show()\n",
"<import token>\n\n\ndef inside_obstacle(point, obstacle):\n \"\"\"\n returns 1 if the point is inside any obstacles\n 0 otherwise\n \"\"\"\n for obs in obstacle:\n if point[0] > obs[0][0] and point[0] < obs[0][2] and point[1] > obs[1][\n 0] and point[1] < obs[1][2]:\n return 1\n return 0\n\n\ndef through_obstacle(line, obstacles):\n \"\"\"\n returns 1 if the line goes through any obstacles\n 0 otherwise\n \"\"\"\n noofpoints = 100\n for i in range(noofpoints):\n if inside_obstacle((line[0] + i * (line[2] - line[0]) / noofpoints,\n line[1] + i * (line[3] - line[1]) / noofpoints), obstacles) == 1:\n return 1\n return 0\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n\n\ndef through_obstacle(line, obstacles):\n \"\"\"\n returns 1 if the line goes through any obstacles\n 0 otherwise\n \"\"\"\n noofpoints = 100\n for i in range(noofpoints):\n if inside_obstacle((line[0] + i * (line[2] - line[0]) / noofpoints,\n line[1] + i * (line[3] - line[1]) / noofpoints), obstacles) == 1:\n return 1\n return 0\n\n\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,015 |
c37593db9078ed808bd540cdec73e3a145ab287e
|
from sklearn_cv_pandas.pandas_cv import (
RandomizedSearchCV,
GridSearchCV
)
from sklearn_cv_pandas.model import Model
|
[
"from sklearn_cv_pandas.pandas_cv import (\n RandomizedSearchCV,\n GridSearchCV\n)\nfrom sklearn_cv_pandas.model import Model",
"from sklearn_cv_pandas.pandas_cv import RandomizedSearchCV, GridSearchCV\nfrom sklearn_cv_pandas.model import Model\n",
"<import token>\n"
] | false |
99,016 |
57b9450c5edf6143f68358f286a126e6bc22e417
|
#seperate transport pathway
import csv
pathway = []
with open("Pathway.csv", "rb") as f:
reader = csv.reader(f, quotechar='"')
for line in reader:
pathway.append(line)
f.close()
#remove transport pathway and reassign pathway id
new_pathway = []
transport = []
for i in xrange(len(pathway)):
temp = []
if "Transport" in pathway[i][1]:
temp.append(transport_num)
transport.append(pathway[i])
else:
new_pathway.append(pathway[i])
new_pathway.append([int(pathway[-1][0])+1, "Transport", "NULL", ""])
with open("pathway_transport.csv", "wb") as f:
writer = csv.writer(f)
writer.writerows(transport)
f.close()
with open("pathway_remove_transport.csv", "wb") as f:
writer = csv.writer(f)
writer.writerows(new_pathway)
f.close()
|
[
"#seperate transport pathway\nimport csv\npathway = []\nwith open(\"Pathway.csv\", \"rb\") as f:\n reader = csv.reader(f, quotechar='\"')\n for line in reader:\n pathway.append(line)\nf.close()\n\n#remove transport pathway and reassign pathway id\nnew_pathway = []\ntransport = []\n\nfor i in xrange(len(pathway)):\n temp = []\n if \"Transport\" in pathway[i][1]:\n temp.append(transport_num)\n transport.append(pathway[i])\n else:\n new_pathway.append(pathway[i])\n\nnew_pathway.append([int(pathway[-1][0])+1, \"Transport\", \"NULL\", \"\"])\n\nwith open(\"pathway_transport.csv\", \"wb\") as f:\n writer = csv.writer(f)\n writer.writerows(transport)\nf.close()\n\nwith open(\"pathway_remove_transport.csv\", \"wb\") as f:\n writer = csv.writer(f)\n writer.writerows(new_pathway)\nf.close()\n\n",
"import csv\npathway = []\nwith open('Pathway.csv', 'rb') as f:\n reader = csv.reader(f, quotechar='\"')\n for line in reader:\n pathway.append(line)\nf.close()\nnew_pathway = []\ntransport = []\nfor i in xrange(len(pathway)):\n temp = []\n if 'Transport' in pathway[i][1]:\n temp.append(transport_num)\n transport.append(pathway[i])\n else:\n new_pathway.append(pathway[i])\nnew_pathway.append([int(pathway[-1][0]) + 1, 'Transport', 'NULL', ''])\nwith open('pathway_transport.csv', 'wb') as f:\n writer = csv.writer(f)\n writer.writerows(transport)\nf.close()\nwith open('pathway_remove_transport.csv', 'wb') as f:\n writer = csv.writer(f)\n writer.writerows(new_pathway)\nf.close()\n",
"<import token>\npathway = []\nwith open('Pathway.csv', 'rb') as f:\n reader = csv.reader(f, quotechar='\"')\n for line in reader:\n pathway.append(line)\nf.close()\nnew_pathway = []\ntransport = []\nfor i in xrange(len(pathway)):\n temp = []\n if 'Transport' in pathway[i][1]:\n temp.append(transport_num)\n transport.append(pathway[i])\n else:\n new_pathway.append(pathway[i])\nnew_pathway.append([int(pathway[-1][0]) + 1, 'Transport', 'NULL', ''])\nwith open('pathway_transport.csv', 'wb') as f:\n writer = csv.writer(f)\n writer.writerows(transport)\nf.close()\nwith open('pathway_remove_transport.csv', 'wb') as f:\n writer = csv.writer(f)\n writer.writerows(new_pathway)\nf.close()\n",
"<import token>\n<assignment token>\nwith open('Pathway.csv', 'rb') as f:\n reader = csv.reader(f, quotechar='\"')\n for line in reader:\n pathway.append(line)\nf.close()\n<assignment token>\nfor i in xrange(len(pathway)):\n temp = []\n if 'Transport' in pathway[i][1]:\n temp.append(transport_num)\n transport.append(pathway[i])\n else:\n new_pathway.append(pathway[i])\nnew_pathway.append([int(pathway[-1][0]) + 1, 'Transport', 'NULL', ''])\nwith open('pathway_transport.csv', 'wb') as f:\n writer = csv.writer(f)\n writer.writerows(transport)\nf.close()\nwith open('pathway_remove_transport.csv', 'wb') as f:\n writer = csv.writer(f)\n writer.writerows(new_pathway)\nf.close()\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,017 |
37664b159b86645ae32be7835e7fc511fbc4202f
|
from __future__ import print_function
import unittest
import os
import sys
here = os.path.dirname(os.path.abspath(__file__))
module_path = os.path.abspath(os.path.join(here, os.pardir))
sys.path.insert(0, module_path)
class TestStations(unittest.TestCase):
@classmethod
def setUp(self):
from vbb.client import VBBService
self.client = VBBService()
def test_get_station(self):
station = self.client.stations.get_station_by_id(id=900000013102)
self.assertIsInstance(station, dict)
def test_get_nearby_stations(self):
station = self.client.stations.get_nearby_stations(latitude=52.52725, longitude=13.4123)
self.assertIsNotNone(station)
def test_get_station_departures(self):
station = self.client.stations.get_station_departures(id=900000013102)
self.assertIsNotNone(station)
def test_get_all_stations(self):
stations = self.client.stations.get_all_stations()
self.assertEqual(len(stations), 13098)
|
[
"from __future__ import print_function\nimport unittest\n\nimport os\nimport sys\n\nhere = os.path.dirname(os.path.abspath(__file__))\nmodule_path = os.path.abspath(os.path.join(here, os.pardir))\nsys.path.insert(0, module_path)\n\n\nclass TestStations(unittest.TestCase):\n\n @classmethod\n def setUp(self):\n from vbb.client import VBBService\n self.client = VBBService()\n\n def test_get_station(self):\n station = self.client.stations.get_station_by_id(id=900000013102)\n self.assertIsInstance(station, dict)\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n\n def test_get_station_departures(self):\n station = self.client.stations.get_station_departures(id=900000013102)\n self.assertIsNotNone(station)\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)",
"from __future__ import print_function\nimport unittest\nimport os\nimport sys\nhere = os.path.dirname(os.path.abspath(__file__))\nmodule_path = os.path.abspath(os.path.join(here, os.pardir))\nsys.path.insert(0, module_path)\n\n\nclass TestStations(unittest.TestCase):\n\n @classmethod\n def setUp(self):\n from vbb.client import VBBService\n self.client = VBBService()\n\n def test_get_station(self):\n station = self.client.stations.get_station_by_id(id=900000013102)\n self.assertIsInstance(station, dict)\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n\n def test_get_station_departures(self):\n station = self.client.stations.get_station_departures(id=900000013102)\n self.assertIsNotNone(station)\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)\n",
"<import token>\nhere = os.path.dirname(os.path.abspath(__file__))\nmodule_path = os.path.abspath(os.path.join(here, os.pardir))\nsys.path.insert(0, module_path)\n\n\nclass TestStations(unittest.TestCase):\n\n @classmethod\n def setUp(self):\n from vbb.client import VBBService\n self.client = VBBService()\n\n def test_get_station(self):\n station = self.client.stations.get_station_by_id(id=900000013102)\n self.assertIsInstance(station, dict)\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n\n def test_get_station_departures(self):\n station = self.client.stations.get_station_departures(id=900000013102)\n self.assertIsNotNone(station)\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)\n",
"<import token>\n<assignment token>\nsys.path.insert(0, module_path)\n\n\nclass TestStations(unittest.TestCase):\n\n @classmethod\n def setUp(self):\n from vbb.client import VBBService\n self.client = VBBService()\n\n def test_get_station(self):\n station = self.client.stations.get_station_by_id(id=900000013102)\n self.assertIsInstance(station, dict)\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n\n def test_get_station_departures(self):\n station = self.client.stations.get_station_departures(id=900000013102)\n self.assertIsNotNone(station)\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass TestStations(unittest.TestCase):\n\n @classmethod\n def setUp(self):\n from vbb.client import VBBService\n self.client = VBBService()\n\n def test_get_station(self):\n station = self.client.stations.get_station_by_id(id=900000013102)\n self.assertIsInstance(station, dict)\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n\n def test_get_station_departures(self):\n station = self.client.stations.get_station_departures(id=900000013102)\n self.assertIsNotNone(station)\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass TestStations(unittest.TestCase):\n <function token>\n\n def test_get_station(self):\n station = self.client.stations.get_station_by_id(id=900000013102)\n self.assertIsInstance(station, dict)\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n\n def test_get_station_departures(self):\n station = self.client.stations.get_station_departures(id=900000013102)\n self.assertIsNotNone(station)\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass TestStations(unittest.TestCase):\n <function token>\n <function token>\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n\n def test_get_station_departures(self):\n station = self.client.stations.get_station_departures(id=900000013102)\n self.assertIsNotNone(station)\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass TestStations(unittest.TestCase):\n <function token>\n <function token>\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n <function token>\n\n def test_get_all_stations(self):\n stations = self.client.stations.get_all_stations()\n self.assertEqual(len(stations), 13098)\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass TestStations(unittest.TestCase):\n <function token>\n <function token>\n\n def test_get_nearby_stations(self):\n station = self.client.stations.get_nearby_stations(latitude=\n 52.52725, longitude=13.4123)\n self.assertIsNotNone(station)\n <function token>\n <function token>\n",
"<import token>\n<assignment token>\n<code token>\n\n\nclass TestStations(unittest.TestCase):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<assignment token>\n<code token>\n<class token>\n"
] | false |
99,018 |
e59277325c28f99fe833d4461fc8a43f1b1a4924
|
from tkinter import *
from tkinter import ttk
import requests
import xmltodict
import json
import csv
import APITM
from fdb import services
import fdb
import time
import threading
from tkinter import messagebox
def Quit():
global root
root.destroy()
root.quit()
def sms_Quit():
global sms_root
sms_root.destroy()
sms_root.quit()
def multifon_main_quit():
global multifon_main
multifon_main.destroy()
multifon_main.quit()
def poisk_region_quit():
global poisk_region_main
poisk_region_main.destroy()
poisk_region_main.quit()
def poisk_region_coords_quit():
global poisk_region_coords
poisk_region_coords.destroy()
poisk_region_coords.quit()
def sms():
def make_menu(w):
global the_menu
the_menu = Menu(w, tearoff=0)
the_menu.add_command(label="Cut")
the_menu.add_command(label="Copy")
the_menu.add_command(label="Paste")
def show_menu(e):
w = e.widget
the_menu.entryconfigure("Cut", command=lambda: w.event_generate("<<Cut>>"))
the_menu.entryconfigure("Copy", command=lambda: w.event_generate("<<Copy>>"))
the_menu.entryconfigure("Paste", command=lambda: w.event_generate("<<Paste>>"))
the_menu.tk.call("tk_popup", the_menu, e.x_root, e.y_root)
def paste_clipboard(event):
event.widget.delete(0, 'end')
event.widget.insert(0, sms_root.clipboard_get())
def smssend():
global dict_entry
r = requests.get('http://smsc.ru/sys/send.php?'
'login=' + dict_entry['login'].get() +
'&psw=' + dict_entry['passw'].get() +
'&phones=' + dict_entry['phone'].get() +
'&mes=' + dict_entry['msg'].get())
label.config(text=r.text)
global sms_root
sms_root = Tk()
make_menu(sms_root)
sms_root.title("Отправка СМС")
sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)
global dict_entry
dict_entry={}
for s in ["login", "passw", "phone","msg"]:
key =s
s = ttk.Entry(sms_root);s.pack()
s1 = ttk.Label(sms_root,text=key);s1.pack()
s.bind('<ButtonRelease-2>', paste_clipboard)
s.bind('<ButtonRelease-3>', show_menu)
dict_entry [key]=s
label = ttk.Label(sms_root);label.pack()
ttk.Button(sms_root, text="Отправить", command=smssend).pack()
ttk.Button(sms_root,text="ВЫХОД(EXIT)", command=sms_Quit).pack()
sms_root.focus_set()
sms_root.mainloop()
def multifon():
global dict_entry
global multifon_main
global var1
def multifon_routing():
global dict_entry
r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' + dict_entry['number'].get() + '@multifon.ru&password=' + dict_entry['passw'].get())
json_r = xmltodict.parse(r.text)
try:
if json_r['response']['routing'] == '1':
label_chek.config(text='только в «МультиФон»')
elif json_r['response']['routing'] == '0':
label_chek.config(text='только телефон')
elif json_r['response']['routing'] == '2':
label_chek.config(text='телефон и «МультиФон»')
except KeyError:
label_chek.config(text=json_r['response']['result']['description'])
def multifon_set_routing():
global dict_entry
r = requests.get('https://sm.megafon.ru/sm/client/routing/set?login='
+ dict_entry['number'].get()
+ '@multifon.ru&password='
+ dict_entry['passw'].get()
+ '&routing=' + str(var1.get()))
json_r = xmltodict.parse(r.text)
label_set.config(text='Результат = ' + json_r['response']['result']['description'])
def make_menu(w):
global the_menu
the_menu = Menu(w, tearoff=0)
the_menu.add_command(label="Cut")
the_menu.add_command(label="Copy")
the_menu.add_command(label="Paste")
def show_menu(e):
w = e.widget
the_menu.entryconfigure("Cut", command=lambda: w.event_generate("<<Cut>>"))
the_menu.entryconfigure("Copy", command=lambda: w.event_generate("<<Copy>>"))
the_menu.entryconfigure("Paste", command=lambda: w.event_generate("<<Paste>>"))
the_menu.tk.call("tk_popup", the_menu, e.x_root, e.y_root)
def paste_clipboard(event):
event.widget.delete(0, 'end')
event.widget.insert(0, multifon_main.clipboard_get())
multifon_main=Tk()
make_menu(multifon_main)
dict_entry={}
for s in ["number", "passw"]:
key =s
s = ttk.Entry(multifon_main,text=key);s.pack()
s1 = ttk.Label(multifon_main, text=key); s1.pack()
s.bind('<ButtonRelease-2>', paste_clipboard)
s.bind('<ButtonRelease-3>', show_menu)
dict_entry [key]=s
ttk.Button(multifon_main, text="Проверить", command=multifon_routing).pack()
label_chek = ttk.Label(multifon_main);label_chek.pack()
ttk.Button(multifon_main, text="Переключить", command=multifon_set_routing).pack()
var1 = IntVar(multifon_main)
ttk.Radiobutton(multifon_main, text=r'только телефон', variable=var1, value=0).pack(anchor = W)
ttk.Radiobutton(multifon_main, text=r'только в «МультиФон»', variable=var1, value=1).pack(anchor = W)
ttk.Radiobutton(multifon_main, text=r'телефон и «МультиФон»', variable=var1, value=2).pack(anchor = W)
label_set = ttk.Label(multifon_main);label_set.pack()
ttk.Button(multifon_main, text="ВЫХОД(EXIT)", command=multifon_main_quit).pack()
multifon_main.focus_set()
multifon_main.mainloop()
def poisk_region():
def show_menu(e):
w = e.widget
the_menu.entryconfigure("Cut", command=lambda: w.event_generate("<<Cut>>"))
the_menu.entryconfigure("Copy", command=lambda: w.event_generate("<<Copy>>"))
the_menu.entryconfigure("Paste", command=lambda: w.event_generate("<<Paste>>"))
the_menu.tk.call("tk_popup", the_menu, e.x_root, e.y_root)
def make_menu(w):
global the_menu
the_menu = Menu(w, tearoff=0)
the_menu.add_command(label="Cut")
the_menu.add_command(label="Copy")
the_menu.add_command(label="Paste")
global dict_entry
global poisk_region_main
dict_entry={}
poisk_region_main=Tk()
make_menu(poisk_region_main)
def paste_clipboard(event):
event.widget.delete(0, 'end')
event.widget.insert(0, poisk_region_main.clipboard_get())
def poisk():
global dict_entry
r = requests.get('http://catalog.api.2gis.ru/geo/search?q='
+ dict_entry['town'].get() +
'&types=city,settlement'
'&format=short&version=1.3'
'&key=' + dict_entry['key'].get())
decoded = json.loads(r.text)
try:
list = decoded['result']
label_region.config(text='Регион= '+str(list[0]['project_id']))
except:
label_region.config(text='error_message= ' + decoded['error_message']+' '+'\n error_code= ' + decoded['error_code'])
for s in ["town", "key"]:
key =s
s = ttk.Entry(poisk_region_main,text=key);s.pack()
s1 = ttk.Label(poisk_region_main, text=key); s1.pack()
s.bind('<ButtonRelease-2>', paste_clipboard)
s.bind('<ButtonRelease-3>', show_menu)
dict_entry [key]=s
label_region = ttk.Label(poisk_region_main, text='');label_region.pack()
ttk.Button(poisk_region_main, text="Найти", command=poisk).pack()
ttk.Button(poisk_region_main, text="ВЫХОД(EXIT)", command=poisk_region_quit).pack()
poisk_region_main.focus_set()
poisk_region_main.mainloop()
def poisk_region_coords():
global poisk_region_coords
global dict_entry
def paste_clipboard(event):
event.widget.delete(0, 'end')
event.widget.insert(0, poisk_region_coords.clipboard_get())
def show_menu(e):
w = e.widget
the_menu.entryconfigure("Cut", command=lambda: w.event_generate("<<Cut>>"))
the_menu.entryconfigure("Copy", command=lambda: w.event_generate("<<Copy>>"))
the_menu.entryconfigure("Paste", command=lambda: w.event_generate("<<Paste>>"))
the_menu.tk.call("tk_popup", the_menu, e.x_root, e.y_root)
def make_menu(w):
global the_menu
the_menu = Menu(w, tearoff=0)
the_menu.add_command(label="Cut")
the_menu.add_command(label="Copy")
the_menu.add_command(label="Paste")
def poisk_coords():
global dict_entry
global poisk_region_coords
town = dict_entry['town'].get()
key = dict_entry['key'].get()
r = requests.get(
'http://catalog.api.2gis.ru/geo/search?q=' + town + '&types=city,settlement&output=xml&version=1.3&key=' + key)
json_r = xmltodict.parse(r.text)
try:
string = str(json_r['root']['result']['geoObject']['selection'])
s = string.find('),(')
string = string.lstrip('MULTIPOLYGON(((')
string = string.replace(')))', '')
string = string.replace('POLYGON((', '')
string = string.replace('))', '')
if s == -1:
q = string.split(' ')
q_last = q.pop()
q_first = q.pop(0)
q_all = q_last + ',' + q_first
q.append(q_all)
with open(town + '.csv', 'w', newline="") as f:
writer = csv.writer(f)
for i in q:
string = i.split(',')
writer.writerow(string)
else:
string = string.split('),(')
i = 0
name = 1
for i in string:
q = i.split(' ')
q_last = q.pop()
q_first = q.pop(0)
q_all = q_last + ',' + q_first
q.append(q_all)
with open(town + str(name) + '.csv', 'w', newline="") as f:
writer = csv.writer(f)
for i in q:
string = i.split(',')
writer.writerow(string)
name = name + 1
messagebox.showinfo('Инфо', 'Все готово')
poisk_region_coords.focus_set()
poisk_region_coords.focus_force()
except:
messagebox.showwarning('Error', 'error_message= ' + json_r['root']['error_message']+'\n'+'error_code= ' + json_r['root']['error_code'] + '\n')
poisk_region_coords.focus_set()
poisk_region_coords.focus_force()
dict_entry={}
poisk_region_coords = Tk()
make_menu(poisk_region_coords)
for s in ["town", "key"]:
key =s
s = ttk.Entry(poisk_region_coords,text=key);s.pack()
s1 = ttk.Label(poisk_region_coords, text=key); s1.pack()
s.bind('<ButtonRelease-2>', paste_clipboard)
s.bind('<ButtonRelease-3>', show_menu)
dict_entry [key]=s
ttk.Button(poisk_region_coords, text="Найти", command=poisk_coords).pack()
ttk.Button(poisk_region_coords, text="ВЫХОД(EXIT)", command=poisk_region_coords_quit).pack()
poisk_region_coords.focus_force()
poisk_region_coords.mainloop()
root = Tk()
root.title("Главное")
'***************************************************'
sms = ttk.Button(text="Отправим смску через смсц", style="C.TButton", command=sms).pack()
multifon = ttk.Button(text="Узнаем роутинг мультифона", style="C.TButton", command=multifon).pack()
gis2 = ttk.Button(text="Поиском региона 2ГИС по городу", style="C.TButton", command=poisk_region).pack()
coords_town = ttk.Button(text="Выгрузим координаты города по названию", style="C.TButton", command=poisk_region_coords).pack()
'***************************************************'
API = ttk.Button(text="Запрос в АПИ ТМ", style="C.TButton").pack()
backup = ttk.Button(text="Бэкап базы *.FDB", style="C.TButton").pack()
oktell = ttk.Button(text="Oktell", style="C.TButton").pack()
tracert = ttk.Button(text="Запустить Tracert", style="C.TButton").pack()
exit = ttk.Button(text="ВЫХОД(EXIT)", style="C.TButton", command=Quit).pack()
root.protocol('WM_DELETE_WINDOW', Quit)
root.mainloop()
|
[
"from tkinter import *\nfrom tkinter import ttk\nimport requests\nimport xmltodict\nimport json\nimport csv\nimport APITM\nfrom fdb import services\nimport fdb\nimport time\nimport threading\nfrom tkinter import messagebox\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\ndef poisk_region_quit():\n global poisk_region_main\n poisk_region_main.destroy()\n poisk_region_main.quit()\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\ndef sms():\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label=\"Cut\")\n the_menu.add_command(label=\"Copy\")\n the_menu.add_command(label=\"Paste\")\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure(\"Cut\", command=lambda: w.event_generate(\"<<Cut>>\"))\n the_menu.entryconfigure(\"Copy\", command=lambda: w.event_generate(\"<<Copy>>\"))\n the_menu.entryconfigure(\"Paste\", command=lambda: w.event_generate(\"<<Paste>>\"))\n the_menu.tk.call(\"tk_popup\", the_menu, e.x_root, e.y_root)\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?'\n 'login=' + dict_entry['login'].get() +\n '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() +\n '&mes=' + dict_entry['msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title(\"Отправка СМС\")\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry={}\n for s in [\"login\", \"passw\", \"phone\",\"msg\"]:\n key =s\n s = ttk.Entry(sms_root);s.pack()\n s1 = ttk.Label(sms_root,text=key);s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry [key]=s\n label = ttk.Label(sms_root);label.pack()\n ttk.Button(sms_root, text=\"Отправить\", command=smssend).pack()\n ttk.Button(sms_root,text=\"ВЫХОД(EXIT)\", command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' + dict_entry['number'].get() + '@multifon.ru&password=' + dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n def multifon_set_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing/set?login='\n + dict_entry['number'].get()\n + '@multifon.ru&password='\n + dict_entry['passw'].get()\n + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']['description'])\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label=\"Cut\")\n the_menu.add_command(label=\"Copy\")\n the_menu.add_command(label=\"Paste\")\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure(\"Cut\", command=lambda: w.event_generate(\"<<Cut>>\"))\n the_menu.entryconfigure(\"Copy\", command=lambda: w.event_generate(\"<<Copy>>\"))\n the_menu.entryconfigure(\"Paste\", command=lambda: w.event_generate(\"<<Paste>>\"))\n the_menu.tk.call(\"tk_popup\", the_menu, e.x_root, e.y_root)\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n\n multifon_main=Tk()\n make_menu(multifon_main)\n dict_entry={}\n\n\n for s in [\"number\", \"passw\"]:\n key =s\n s = ttk.Entry(multifon_main,text=key);s.pack()\n s1 = ttk.Label(multifon_main, text=key); s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry [key]=s\n ttk.Button(multifon_main, text=\"Проверить\", command=multifon_routing).pack()\n label_chek = ttk.Label(multifon_main);label_chek.pack()\n ttk.Button(multifon_main, text=\"Переключить\", command=multifon_set_routing).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text=r'только телефон', variable=var1, value=0).pack(anchor = W)\n ttk.Radiobutton(multifon_main, text=r'только в «МультиФон»', variable=var1, value=1).pack(anchor = W)\n ttk.Radiobutton(multifon_main, text=r'телефон и «МультиФон»', variable=var1, value=2).pack(anchor = W)\n label_set = ttk.Label(multifon_main);label_set.pack()\n ttk.Button(multifon_main, text=\"ВЫХОД(EXIT)\", command=multifon_main_quit).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\ndef poisk_region():\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure(\"Cut\", command=lambda: w.event_generate(\"<<Cut>>\"))\n the_menu.entryconfigure(\"Copy\", command=lambda: w.event_generate(\"<<Copy>>\"))\n the_menu.entryconfigure(\"Paste\", command=lambda: w.event_generate(\"<<Paste>>\"))\n the_menu.tk.call(\"tk_popup\", the_menu, e.x_root, e.y_root)\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label=\"Cut\")\n the_menu.add_command(label=\"Copy\")\n the_menu.add_command(label=\"Paste\")\n global dict_entry\n global poisk_region_main\n dict_entry={}\n poisk_region_main=Tk()\n make_menu(poisk_region_main)\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_main.clipboard_get())\n def poisk():\n global dict_entry\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q='\n + dict_entry['town'].get() +\n '&types=city,settlement'\n '&format=short&version=1.3'\n '&key=' + dict_entry['key'].get())\n decoded = json.loads(r.text)\n try:\n list = decoded['result']\n label_region.config(text='Регион= '+str(list[0]['project_id']))\n except:\n\n label_region.config(text='error_message= ' + decoded['error_message']+' '+'\\n error_code= ' + decoded['error_code'])\n\n for s in [\"town\", \"key\"]:\n key =s\n s = ttk.Entry(poisk_region_main,text=key);s.pack()\n s1 = ttk.Label(poisk_region_main, text=key); s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry [key]=s\n label_region = ttk.Label(poisk_region_main, text='');label_region.pack()\n ttk.Button(poisk_region_main, text=\"Найти\", command=poisk).pack()\n ttk.Button(poisk_region_main, text=\"ВЫХОД(EXIT)\", command=poisk_region_quit).pack()\n\n poisk_region_main.focus_set()\n poisk_region_main.mainloop()\n\ndef poisk_region_coords():\n global poisk_region_coords\n global dict_entry\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_coords.clipboard_get())\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure(\"Cut\", command=lambda: w.event_generate(\"<<Cut>>\"))\n the_menu.entryconfigure(\"Copy\", command=lambda: w.event_generate(\"<<Copy>>\"))\n the_menu.entryconfigure(\"Paste\", command=lambda: w.event_generate(\"<<Paste>>\"))\n the_menu.tk.call(\"tk_popup\", the_menu, e.x_root, e.y_root)\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label=\"Cut\")\n the_menu.add_command(label=\"Copy\")\n the_menu.add_command(label=\"Paste\")\n\n def poisk_coords():\n global dict_entry\n global poisk_region_coords\n town = dict_entry['town'].get()\n key = dict_entry['key'].get()\n r = requests.get(\n 'http://catalog.api.2gis.ru/geo/search?q=' + town + '&types=city,settlement&output=xml&version=1.3&key=' + key)\n json_r = xmltodict.parse(r.text)\n try:\n string = str(json_r['root']['result']['geoObject']['selection'])\n s = string.find('),(')\n string = string.lstrip('MULTIPOLYGON(((')\n string = string.replace(')))', '')\n string = string.replace('POLYGON((', '')\n string = string.replace('))', '')\n if s == -1:\n q = string.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + '.csv', 'w', newline=\"\") as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n else:\n string = string.split('),(')\n i = 0\n name = 1\n for i in string:\n q = i.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + str(name) + '.csv', 'w', newline=\"\") as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n name = name + 1\n messagebox.showinfo('Инфо', 'Все готово')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n except:\n messagebox.showwarning('Error', 'error_message= ' + json_r['root']['error_message']+'\\n'+'error_code= ' + json_r['root']['error_code'] + '\\n')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n\n dict_entry={}\n poisk_region_coords = Tk()\n make_menu(poisk_region_coords)\n for s in [\"town\", \"key\"]:\n key =s\n s = ttk.Entry(poisk_region_coords,text=key);s.pack()\n s1 = ttk.Label(poisk_region_coords, text=key); s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry [key]=s\n ttk.Button(poisk_region_coords, text=\"Найти\", command=poisk_coords).pack()\n ttk.Button(poisk_region_coords, text=\"ВЫХОД(EXIT)\", command=poisk_region_coords_quit).pack()\n poisk_region_coords.focus_force()\n poisk_region_coords.mainloop()\n\nroot = Tk()\nroot.title(\"Главное\")\n'***************************************************'\nsms = ttk.Button(text=\"Отправим смску через смсц\", style=\"C.TButton\", command=sms).pack()\nmultifon = ttk.Button(text=\"Узнаем роутинг мультифона\", style=\"C.TButton\", command=multifon).pack()\ngis2 = ttk.Button(text=\"Поиском региона 2ГИС по городу\", style=\"C.TButton\", command=poisk_region).pack()\ncoords_town = ttk.Button(text=\"Выгрузим координаты города по названию\", style=\"C.TButton\", command=poisk_region_coords).pack()\n'***************************************************'\nAPI = ttk.Button(text=\"Запрос в АПИ ТМ\", style=\"C.TButton\").pack()\nbackup = ttk.Button(text=\"Бэкап базы *.FDB\", style=\"C.TButton\").pack()\noktell = ttk.Button(text=\"Oktell\", style=\"C.TButton\").pack()\ntracert = ttk.Button(text=\"Запустить Tracert\", style=\"C.TButton\").pack()\nexit = ttk.Button(text=\"ВЫХОД(EXIT)\", style=\"C.TButton\", command=Quit).pack()\nroot.protocol('WM_DELETE_WINDOW', Quit)\nroot.mainloop()",
"from tkinter import *\nfrom tkinter import ttk\nimport requests\nimport xmltodict\nimport json\nimport csv\nimport APITM\nfrom fdb import services\nimport fdb\nimport time\nimport threading\nfrom tkinter import messagebox\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\ndef poisk_region_quit():\n global poisk_region_main\n poisk_region_main.destroy()\n poisk_region_main.quit()\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\ndef poisk_region():\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n global dict_entry\n global poisk_region_main\n dict_entry = {}\n poisk_region_main = Tk()\n make_menu(poisk_region_main)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_main.clipboard_get())\n\n def poisk():\n global dict_entry\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' +\n dict_entry['town'].get() +\n '&types=city,settlement&format=short&version=1.3&key=' +\n dict_entry['key'].get())\n decoded = json.loads(r.text)\n try:\n list = decoded['result']\n label_region.config(text='Регион= ' + str(list[0]['project_id']))\n except:\n label_region.config(text='error_message= ' + decoded[\n 'error_message'] + ' ' + '\\n error_code= ' + decoded[\n 'error_code'])\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_main, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label_region = ttk.Label(poisk_region_main, text='')\n label_region.pack()\n ttk.Button(poisk_region_main, text='Найти', command=poisk).pack()\n ttk.Button(poisk_region_main, text='ВЫХОД(EXIT)', command=poisk_region_quit\n ).pack()\n poisk_region_main.focus_set()\n poisk_region_main.mainloop()\n\n\ndef poisk_region_coords():\n global poisk_region_coords\n global dict_entry\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_coords.clipboard_get())\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def poisk_coords():\n global dict_entry\n global poisk_region_coords\n town = dict_entry['town'].get()\n key = dict_entry['key'].get()\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' + town +\n '&types=city,settlement&output=xml&version=1.3&key=' + key)\n json_r = xmltodict.parse(r.text)\n try:\n string = str(json_r['root']['result']['geoObject']['selection'])\n s = string.find('),(')\n string = string.lstrip('MULTIPOLYGON(((')\n string = string.replace(')))', '')\n string = string.replace('POLYGON((', '')\n string = string.replace('))', '')\n if s == -1:\n q = string.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n else:\n string = string.split('),(')\n i = 0\n name = 1\n for i in string:\n q = i.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + str(name) + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n name = name + 1\n messagebox.showinfo('Инфо', 'Все готово')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n except:\n messagebox.showwarning('Error', 'error_message= ' + json_r[\n 'root']['error_message'] + '\\n' + 'error_code= ' + json_r[\n 'root']['error_code'] + '\\n')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n dict_entry = {}\n poisk_region_coords = Tk()\n make_menu(poisk_region_coords)\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_coords, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_coords, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(poisk_region_coords, text='Найти', command=poisk_coords).pack()\n ttk.Button(poisk_region_coords, text='ВЫХОД(EXIT)', command=\n poisk_region_coords_quit).pack()\n poisk_region_coords.focus_force()\n poisk_region_coords.mainloop()\n\n\nroot = Tk()\nroot.title('Главное')\n<docstring token>\nsms = ttk.Button(text='Отправим смску через смсц', style='C.TButton',\n command=sms).pack()\nmultifon = ttk.Button(text='Узнаем роутинг мультифона', style='C.TButton',\n command=multifon).pack()\ngis2 = ttk.Button(text='Поиском региона 2ГИС по городу', style='C.TButton',\n command=poisk_region).pack()\ncoords_town = ttk.Button(text='Выгрузим координаты города по названию',\n style='C.TButton', command=poisk_region_coords).pack()\n<docstring token>\nAPI = ttk.Button(text='Запрос в АПИ ТМ', style='C.TButton').pack()\nbackup = ttk.Button(text='Бэкап базы *.FDB', style='C.TButton').pack()\noktell = ttk.Button(text='Oktell', style='C.TButton').pack()\ntracert = ttk.Button(text='Запустить Tracert', style='C.TButton').pack()\nexit = ttk.Button(text='ВЫХОД(EXIT)', style='C.TButton', command=Quit).pack()\nroot.protocol('WM_DELETE_WINDOW', Quit)\nroot.mainloop()\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\ndef poisk_region_quit():\n global poisk_region_main\n poisk_region_main.destroy()\n poisk_region_main.quit()\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\ndef poisk_region():\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n global dict_entry\n global poisk_region_main\n dict_entry = {}\n poisk_region_main = Tk()\n make_menu(poisk_region_main)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_main.clipboard_get())\n\n def poisk():\n global dict_entry\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' +\n dict_entry['town'].get() +\n '&types=city,settlement&format=short&version=1.3&key=' +\n dict_entry['key'].get())\n decoded = json.loads(r.text)\n try:\n list = decoded['result']\n label_region.config(text='Регион= ' + str(list[0]['project_id']))\n except:\n label_region.config(text='error_message= ' + decoded[\n 'error_message'] + ' ' + '\\n error_code= ' + decoded[\n 'error_code'])\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_main, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label_region = ttk.Label(poisk_region_main, text='')\n label_region.pack()\n ttk.Button(poisk_region_main, text='Найти', command=poisk).pack()\n ttk.Button(poisk_region_main, text='ВЫХОД(EXIT)', command=poisk_region_quit\n ).pack()\n poisk_region_main.focus_set()\n poisk_region_main.mainloop()\n\n\ndef poisk_region_coords():\n global poisk_region_coords\n global dict_entry\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_coords.clipboard_get())\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def poisk_coords():\n global dict_entry\n global poisk_region_coords\n town = dict_entry['town'].get()\n key = dict_entry['key'].get()\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' + town +\n '&types=city,settlement&output=xml&version=1.3&key=' + key)\n json_r = xmltodict.parse(r.text)\n try:\n string = str(json_r['root']['result']['geoObject']['selection'])\n s = string.find('),(')\n string = string.lstrip('MULTIPOLYGON(((')\n string = string.replace(')))', '')\n string = string.replace('POLYGON((', '')\n string = string.replace('))', '')\n if s == -1:\n q = string.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n else:\n string = string.split('),(')\n i = 0\n name = 1\n for i in string:\n q = i.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + str(name) + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n name = name + 1\n messagebox.showinfo('Инфо', 'Все готово')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n except:\n messagebox.showwarning('Error', 'error_message= ' + json_r[\n 'root']['error_message'] + '\\n' + 'error_code= ' + json_r[\n 'root']['error_code'] + '\\n')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n dict_entry = {}\n poisk_region_coords = Tk()\n make_menu(poisk_region_coords)\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_coords, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_coords, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(poisk_region_coords, text='Найти', command=poisk_coords).pack()\n ttk.Button(poisk_region_coords, text='ВЫХОД(EXIT)', command=\n poisk_region_coords_quit).pack()\n poisk_region_coords.focus_force()\n poisk_region_coords.mainloop()\n\n\nroot = Tk()\nroot.title('Главное')\n<docstring token>\nsms = ttk.Button(text='Отправим смску через смсц', style='C.TButton',\n command=sms).pack()\nmultifon = ttk.Button(text='Узнаем роутинг мультифона', style='C.TButton',\n command=multifon).pack()\ngis2 = ttk.Button(text='Поиском региона 2ГИС по городу', style='C.TButton',\n command=poisk_region).pack()\ncoords_town = ttk.Button(text='Выгрузим координаты города по названию',\n style='C.TButton', command=poisk_region_coords).pack()\n<docstring token>\nAPI = ttk.Button(text='Запрос в АПИ ТМ', style='C.TButton').pack()\nbackup = ttk.Button(text='Бэкап базы *.FDB', style='C.TButton').pack()\noktell = ttk.Button(text='Oktell', style='C.TButton').pack()\ntracert = ttk.Button(text='Запустить Tracert', style='C.TButton').pack()\nexit = ttk.Button(text='ВЫХОД(EXIT)', style='C.TButton', command=Quit).pack()\nroot.protocol('WM_DELETE_WINDOW', Quit)\nroot.mainloop()\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\ndef poisk_region_quit():\n global poisk_region_main\n poisk_region_main.destroy()\n poisk_region_main.quit()\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\ndef poisk_region():\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n global dict_entry\n global poisk_region_main\n dict_entry = {}\n poisk_region_main = Tk()\n make_menu(poisk_region_main)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_main.clipboard_get())\n\n def poisk():\n global dict_entry\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' +\n dict_entry['town'].get() +\n '&types=city,settlement&format=short&version=1.3&key=' +\n dict_entry['key'].get())\n decoded = json.loads(r.text)\n try:\n list = decoded['result']\n label_region.config(text='Регион= ' + str(list[0]['project_id']))\n except:\n label_region.config(text='error_message= ' + decoded[\n 'error_message'] + ' ' + '\\n error_code= ' + decoded[\n 'error_code'])\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_main, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label_region = ttk.Label(poisk_region_main, text='')\n label_region.pack()\n ttk.Button(poisk_region_main, text='Найти', command=poisk).pack()\n ttk.Button(poisk_region_main, text='ВЫХОД(EXIT)', command=poisk_region_quit\n ).pack()\n poisk_region_main.focus_set()\n poisk_region_main.mainloop()\n\n\ndef poisk_region_coords():\n global poisk_region_coords\n global dict_entry\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_coords.clipboard_get())\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def poisk_coords():\n global dict_entry\n global poisk_region_coords\n town = dict_entry['town'].get()\n key = dict_entry['key'].get()\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' + town +\n '&types=city,settlement&output=xml&version=1.3&key=' + key)\n json_r = xmltodict.parse(r.text)\n try:\n string = str(json_r['root']['result']['geoObject']['selection'])\n s = string.find('),(')\n string = string.lstrip('MULTIPOLYGON(((')\n string = string.replace(')))', '')\n string = string.replace('POLYGON((', '')\n string = string.replace('))', '')\n if s == -1:\n q = string.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n else:\n string = string.split('),(')\n i = 0\n name = 1\n for i in string:\n q = i.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + str(name) + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n name = name + 1\n messagebox.showinfo('Инфо', 'Все готово')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n except:\n messagebox.showwarning('Error', 'error_message= ' + json_r[\n 'root']['error_message'] + '\\n' + 'error_code= ' + json_r[\n 'root']['error_code'] + '\\n')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n dict_entry = {}\n poisk_region_coords = Tk()\n make_menu(poisk_region_coords)\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_coords, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_coords, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(poisk_region_coords, text='Найти', command=poisk_coords).pack()\n ttk.Button(poisk_region_coords, text='ВЫХОД(EXIT)', command=\n poisk_region_coords_quit).pack()\n poisk_region_coords.focus_force()\n poisk_region_coords.mainloop()\n\n\n<assignment token>\nroot.title('Главное')\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\nroot.protocol('WM_DELETE_WINDOW', Quit)\nroot.mainloop()\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\ndef poisk_region_quit():\n global poisk_region_main\n poisk_region_main.destroy()\n poisk_region_main.quit()\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\ndef poisk_region():\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n global dict_entry\n global poisk_region_main\n dict_entry = {}\n poisk_region_main = Tk()\n make_menu(poisk_region_main)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_main.clipboard_get())\n\n def poisk():\n global dict_entry\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' +\n dict_entry['town'].get() +\n '&types=city,settlement&format=short&version=1.3&key=' +\n dict_entry['key'].get())\n decoded = json.loads(r.text)\n try:\n list = decoded['result']\n label_region.config(text='Регион= ' + str(list[0]['project_id']))\n except:\n label_region.config(text='error_message= ' + decoded[\n 'error_message'] + ' ' + '\\n error_code= ' + decoded[\n 'error_code'])\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_main, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label_region = ttk.Label(poisk_region_main, text='')\n label_region.pack()\n ttk.Button(poisk_region_main, text='Найти', command=poisk).pack()\n ttk.Button(poisk_region_main, text='ВЫХОД(EXIT)', command=poisk_region_quit\n ).pack()\n poisk_region_main.focus_set()\n poisk_region_main.mainloop()\n\n\ndef poisk_region_coords():\n global poisk_region_coords\n global dict_entry\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_coords.clipboard_get())\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def poisk_coords():\n global dict_entry\n global poisk_region_coords\n town = dict_entry['town'].get()\n key = dict_entry['key'].get()\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' + town +\n '&types=city,settlement&output=xml&version=1.3&key=' + key)\n json_r = xmltodict.parse(r.text)\n try:\n string = str(json_r['root']['result']['geoObject']['selection'])\n s = string.find('),(')\n string = string.lstrip('MULTIPOLYGON(((')\n string = string.replace(')))', '')\n string = string.replace('POLYGON((', '')\n string = string.replace('))', '')\n if s == -1:\n q = string.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n else:\n string = string.split('),(')\n i = 0\n name = 1\n for i in string:\n q = i.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + str(name) + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n name = name + 1\n messagebox.showinfo('Инфо', 'Все готово')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n except:\n messagebox.showwarning('Error', 'error_message= ' + json_r[\n 'root']['error_message'] + '\\n' + 'error_code= ' + json_r[\n 'root']['error_code'] + '\\n')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n dict_entry = {}\n poisk_region_coords = Tk()\n make_menu(poisk_region_coords)\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_coords, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_coords, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(poisk_region_coords, text='Найти', command=poisk_coords).pack()\n ttk.Button(poisk_region_coords, text='ВЫХОД(EXIT)', command=\n poisk_region_coords_quit).pack()\n poisk_region_coords.focus_force()\n poisk_region_coords.mainloop()\n\n\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\n<function token>\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\ndef poisk_region():\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n global dict_entry\n global poisk_region_main\n dict_entry = {}\n poisk_region_main = Tk()\n make_menu(poisk_region_main)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_main.clipboard_get())\n\n def poisk():\n global dict_entry\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' +\n dict_entry['town'].get() +\n '&types=city,settlement&format=short&version=1.3&key=' +\n dict_entry['key'].get())\n decoded = json.loads(r.text)\n try:\n list = decoded['result']\n label_region.config(text='Регион= ' + str(list[0]['project_id']))\n except:\n label_region.config(text='error_message= ' + decoded[\n 'error_message'] + ' ' + '\\n error_code= ' + decoded[\n 'error_code'])\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_main, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label_region = ttk.Label(poisk_region_main, text='')\n label_region.pack()\n ttk.Button(poisk_region_main, text='Найти', command=poisk).pack()\n ttk.Button(poisk_region_main, text='ВЫХОД(EXIT)', command=poisk_region_quit\n ).pack()\n poisk_region_main.focus_set()\n poisk_region_main.mainloop()\n\n\ndef poisk_region_coords():\n global poisk_region_coords\n global dict_entry\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_coords.clipboard_get())\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def poisk_coords():\n global dict_entry\n global poisk_region_coords\n town = dict_entry['town'].get()\n key = dict_entry['key'].get()\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' + town +\n '&types=city,settlement&output=xml&version=1.3&key=' + key)\n json_r = xmltodict.parse(r.text)\n try:\n string = str(json_r['root']['result']['geoObject']['selection'])\n s = string.find('),(')\n string = string.lstrip('MULTIPOLYGON(((')\n string = string.replace(')))', '')\n string = string.replace('POLYGON((', '')\n string = string.replace('))', '')\n if s == -1:\n q = string.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n else:\n string = string.split('),(')\n i = 0\n name = 1\n for i in string:\n q = i.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + str(name) + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n name = name + 1\n messagebox.showinfo('Инфо', 'Все готово')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n except:\n messagebox.showwarning('Error', 'error_message= ' + json_r[\n 'root']['error_message'] + '\\n' + 'error_code= ' + json_r[\n 'root']['error_code'] + '\\n')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n dict_entry = {}\n poisk_region_coords = Tk()\n make_menu(poisk_region_coords)\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_coords, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_coords, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(poisk_region_coords, text='Найти', command=poisk_coords).pack()\n ttk.Button(poisk_region_coords, text='ВЫХОД(EXIT)', command=\n poisk_region_coords_quit).pack()\n poisk_region_coords.focus_force()\n poisk_region_coords.mainloop()\n\n\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\n<function token>\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\n<function token>\n\n\ndef poisk_region_coords():\n global poisk_region_coords\n global dict_entry\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, poisk_region_coords.clipboard_get())\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def poisk_coords():\n global dict_entry\n global poisk_region_coords\n town = dict_entry['town'].get()\n key = dict_entry['key'].get()\n r = requests.get('http://catalog.api.2gis.ru/geo/search?q=' + town +\n '&types=city,settlement&output=xml&version=1.3&key=' + key)\n json_r = xmltodict.parse(r.text)\n try:\n string = str(json_r['root']['result']['geoObject']['selection'])\n s = string.find('),(')\n string = string.lstrip('MULTIPOLYGON(((')\n string = string.replace(')))', '')\n string = string.replace('POLYGON((', '')\n string = string.replace('))', '')\n if s == -1:\n q = string.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n else:\n string = string.split('),(')\n i = 0\n name = 1\n for i in string:\n q = i.split(' ')\n q_last = q.pop()\n q_first = q.pop(0)\n q_all = q_last + ',' + q_first\n q.append(q_all)\n with open(town + str(name) + '.csv', 'w', newline='') as f:\n writer = csv.writer(f)\n for i in q:\n string = i.split(',')\n writer.writerow(string)\n name = name + 1\n messagebox.showinfo('Инфо', 'Все готово')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n except:\n messagebox.showwarning('Error', 'error_message= ' + json_r[\n 'root']['error_message'] + '\\n' + 'error_code= ' + json_r[\n 'root']['error_code'] + '\\n')\n poisk_region_coords.focus_set()\n poisk_region_coords.focus_force()\n dict_entry = {}\n poisk_region_coords = Tk()\n make_menu(poisk_region_coords)\n for s in ['town', 'key']:\n key = s\n s = ttk.Entry(poisk_region_coords, text=key)\n s.pack()\n s1 = ttk.Label(poisk_region_coords, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(poisk_region_coords, text='Найти', command=poisk_coords).pack()\n ttk.Button(poisk_region_coords, text='ВЫХОД(EXIT)', command=\n poisk_region_coords_quit).pack()\n poisk_region_coords.focus_force()\n poisk_region_coords.mainloop()\n\n\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\ndef sms_Quit():\n global sms_root\n sms_root.destroy()\n sms_root.quit()\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\n<function token>\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\n<function token>\n\n\ndef multifon_main_quit():\n global multifon_main\n multifon_main.destroy()\n multifon_main.quit()\n\n\n<function token>\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\ndef multifon():\n global dict_entry\n global multifon_main\n global var1\n\n def multifon_routing():\n global dict_entry\n r = requests.get('https://sm.megafon.ru/sm/client/routing?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get())\n json_r = xmltodict.parse(r.text)\n try:\n if json_r['response']['routing'] == '1':\n label_chek.config(text='только в «МультиФон»')\n elif json_r['response']['routing'] == '0':\n label_chek.config(text='только телефон')\n elif json_r['response']['routing'] == '2':\n label_chek.config(text='телефон и «МультиФон»')\n except KeyError:\n label_chek.config(text=json_r['response']['result']['description'])\n\n def multifon_set_routing():\n global dict_entry\n r = requests.get(\n 'https://sm.megafon.ru/sm/client/routing/set?login=' +\n dict_entry['number'].get() + '@multifon.ru&password=' +\n dict_entry['passw'].get() + '&routing=' + str(var1.get()))\n json_r = xmltodict.parse(r.text)\n label_set.config(text='Результат = ' + json_r['response']['result']\n ['description'])\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, multifon_main.clipboard_get())\n multifon_main = Tk()\n make_menu(multifon_main)\n dict_entry = {}\n for s in ['number', 'passw']:\n key = s\n s = ttk.Entry(multifon_main, text=key)\n s.pack()\n s1 = ttk.Label(multifon_main, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n ttk.Button(multifon_main, text='Проверить', command=multifon_routing).pack(\n )\n label_chek = ttk.Label(multifon_main)\n label_chek.pack()\n ttk.Button(multifon_main, text='Переключить', command=multifon_set_routing\n ).pack()\n var1 = IntVar(multifon_main)\n ttk.Radiobutton(multifon_main, text='только телефон', variable=var1,\n value=0).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='только в «МультиФон»', variable=\n var1, value=1).pack(anchor=W)\n ttk.Radiobutton(multifon_main, text='телефон и «МультиФон»', variable=\n var1, value=2).pack(anchor=W)\n label_set = ttk.Label(multifon_main)\n label_set.pack()\n ttk.Button(multifon_main, text='ВЫХОД(EXIT)', command=multifon_main_quit\n ).pack()\n multifon_main.focus_set()\n multifon_main.mainloop()\n\n\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef poisk_region_coords_quit():\n global poisk_region_coords\n poisk_region_coords.destroy()\n poisk_region_coords.quit()\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef sms():\n\n def make_menu(w):\n global the_menu\n the_menu = Menu(w, tearoff=0)\n the_menu.add_command(label='Cut')\n the_menu.add_command(label='Copy')\n the_menu.add_command(label='Paste')\n\n def show_menu(e):\n w = e.widget\n the_menu.entryconfigure('Cut', command=lambda : w.event_generate(\n '<<Cut>>'))\n the_menu.entryconfigure('Copy', command=lambda : w.event_generate(\n '<<Copy>>'))\n the_menu.entryconfigure('Paste', command=lambda : w.event_generate(\n '<<Paste>>'))\n the_menu.tk.call('tk_popup', the_menu, e.x_root, e.y_root)\n\n def paste_clipboard(event):\n event.widget.delete(0, 'end')\n event.widget.insert(0, sms_root.clipboard_get())\n\n def smssend():\n global dict_entry\n r = requests.get('http://smsc.ru/sys/send.php?login=' + dict_entry[\n 'login'].get() + '&psw=' + dict_entry['passw'].get() +\n '&phones=' + dict_entry['phone'].get() + '&mes=' + dict_entry[\n 'msg'].get())\n label.config(text=r.text)\n global sms_root\n sms_root = Tk()\n make_menu(sms_root)\n sms_root.title('Отправка СМС')\n sms_root.protocol('WM_DELETE_WINDOW', sms_Quit)\n global dict_entry\n dict_entry = {}\n for s in ['login', 'passw', 'phone', 'msg']:\n key = s\n s = ttk.Entry(sms_root)\n s.pack()\n s1 = ttk.Label(sms_root, text=key)\n s1.pack()\n s.bind('<ButtonRelease-2>', paste_clipboard)\n s.bind('<ButtonRelease-3>', show_menu)\n dict_entry[key] = s\n label = ttk.Label(sms_root)\n label.pack()\n ttk.Button(sms_root, text='Отправить', command=smssend).pack()\n ttk.Button(sms_root, text='ВЫХОД(EXIT)', command=sms_Quit).pack()\n sms_root.focus_set()\n sms_root.mainloop()\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n\n\ndef Quit():\n global root\n root.destroy()\n root.quit()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<code token>\n<docstring token>\n<assignment token>\n<docstring token>\n<assignment token>\n<code token>\n"
] | false |
99,019 |
a59fa5ec5ed5ad790e19bd80482300b42a905725
|
from django.conf.urls import url
from quote.views import QuoteListView, QuoteListCreateView, QuoteRUDView
urlpatterns = [
url(r'^quote/$', QuoteListView.as_view(), name='quote'),
url(r'^quote/list/$', QuoteListCreateView.as_view(), name='create'),
url(r'^quote/detail/$', QuoteRUDView.as_view(), name='RUD'),
]
|
[
"from django.conf.urls import url\n\nfrom quote.views import QuoteListView, QuoteListCreateView, QuoteRUDView\n\nurlpatterns = [\n url(r'^quote/$', QuoteListView.as_view(), name='quote'),\n url(r'^quote/list/$', QuoteListCreateView.as_view(), name='create'),\n url(r'^quote/detail/$', QuoteRUDView.as_view(), name='RUD'),\n]",
"from django.conf.urls import url\nfrom quote.views import QuoteListView, QuoteListCreateView, QuoteRUDView\nurlpatterns = [url('^quote/$', QuoteListView.as_view(), name='quote'), url(\n '^quote/list/$', QuoteListCreateView.as_view(), name='create'), url(\n '^quote/detail/$', QuoteRUDView.as_view(), name='RUD')]\n",
"<import token>\nurlpatterns = [url('^quote/$', QuoteListView.as_view(), name='quote'), url(\n '^quote/list/$', QuoteListCreateView.as_view(), name='create'), url(\n '^quote/detail/$', QuoteRUDView.as_view(), name='RUD')]\n",
"<import token>\n<assignment token>\n"
] | false |
99,020 |
2b6621aa64970045e575737523371f5220c7ee1e
|
# ============================================================================
#
# Copyright (C) 2007-2012 Conceptive Engineering bvba. All rights reserved.
# www.conceptive.be / [email protected]
#
# This file is part of the Camelot Library.
#
# This file may be used under the terms of the GNU General Public
# License version 2.0 as published by the Free Software Foundation
# and appearing in the file license.txt included in the packaging of
# this file. Please review this information to ensure GNU
# General Public Licensing requirements will be met.
#
# If you are unsure which license is appropriate for your use, please
# visit www.python-camelot.com or contact [email protected]
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
# For use of this library in commercial applications, please contact
# [email protected]
#
# ============================================================================
from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import Qt
from customeditor import CustomEditor
from camelot.view.art import Icon
default_icon_names = [
'face-angel',
'face-crying',
'face-devilish',
'face-glasses',
'face-grin',
'face-kiss',
'face-monkey',
'face-plain',
'face-sad',
'face-smile',
'face-smile-big',
'face-surprise',
'face-wink',
]
default_icons = list( (icon_name, Icon('tango/16x16/emotes/%s.png'%icon_name)) for icon_name in default_icon_names)
class SmileyEditor(CustomEditor):
def __init__(self,
parent,
editable = True,
icons = default_icons,
field_name = 'icons',
**kwargs):
CustomEditor.__init__(self, parent)
self.setObjectName( field_name )
self.box = QtGui.QComboBox()
self.box.setFrame(True)
self.box.setEditable(False)
self.name_by_position = {0:None}
self.position_by_name = {None:0}
self.box.addItem('')
for i,(icon_name, icon) in enumerate(icons):
self.name_by_position[i+1] = icon_name
self.position_by_name[icon_name] = i+1
self.box.addItem(icon.getQIcon(), '')
self.box.setFixedHeight(self.get_height())
self.setFocusPolicy(Qt.StrongFocus)
layout = QtGui.QHBoxLayout(self)
layout.setContentsMargins( 0, 0, 0, 0)
layout.setSpacing(0)
self.setAutoFillBackground(True)
if not editable:
self.box.setEnabled(False)
else:
self.box.setEnabled(True)
self.box.activated.connect( self.smiley_changed )
layout.addWidget(self.box)
layout.addStretch()
self.setLayout(layout)
def get_value(self):
position = self.box.currentIndex()
return CustomEditor.get_value(self) or self.name_by_position[position]
def set_enabled(self, editable=True):
self.box.setEnabled(editable)
@QtCore.pyqtSlot( int )
def smiley_changed(self, _index ):
self.editingFinished.emit()
def set_value(self, value):
name = CustomEditor.set_value(self, value)
self.box.setCurrentIndex( self.position_by_name[name] )
|
[
"# ============================================================================\n#\n# Copyright (C) 2007-2012 Conceptive Engineering bvba. All rights reserved.\n# www.conceptive.be / [email protected]\n#\n# This file is part of the Camelot Library.\n#\n# This file may be used under the terms of the GNU General Public\n# License version 2.0 as published by the Free Software Foundation\n# and appearing in the file license.txt included in the packaging of\n# this file. Please review this information to ensure GNU\n# General Public Licensing requirements will be met.\n#\n# If you are unsure which license is appropriate for your use, please\n# visit www.python-camelot.com or contact [email protected]\n#\n# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE\n# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.\n#\n# For use of this library in commercial applications, please contact\n# [email protected]\n#\n# ============================================================================\n\nfrom PyQt4 import QtGui, QtCore\nfrom PyQt4.QtCore import Qt\n\nfrom customeditor import CustomEditor\nfrom camelot.view.art import Icon\n\ndefault_icon_names = [\n 'face-angel',\n 'face-crying',\n 'face-devilish',\n 'face-glasses',\n 'face-grin',\n 'face-kiss',\n 'face-monkey',\n 'face-plain',\n 'face-sad',\n 'face-smile',\n 'face-smile-big',\n 'face-surprise',\n 'face-wink',\n]\n\ndefault_icons = list( (icon_name, Icon('tango/16x16/emotes/%s.png'%icon_name)) for icon_name in default_icon_names)\n\nclass SmileyEditor(CustomEditor):\n\n def __init__(self, \n parent, \n editable = True, \n icons = default_icons, \n field_name = 'icons',\n **kwargs):\n CustomEditor.__init__(self, parent)\n self.setObjectName( field_name )\n self.box = QtGui.QComboBox()\n self.box.setFrame(True)\n self.box.setEditable(False)\n self.name_by_position = {0:None}\n self.position_by_name = {None:0}\n\n self.box.addItem('')\n for i,(icon_name, icon) in enumerate(icons):\n self.name_by_position[i+1] = icon_name\n self.position_by_name[icon_name] = i+1\n self.box.addItem(icon.getQIcon(), '')\n self.box.setFixedHeight(self.get_height())\n\n self.setFocusPolicy(Qt.StrongFocus)\n layout = QtGui.QHBoxLayout(self)\n layout.setContentsMargins( 0, 0, 0, 0)\n layout.setSpacing(0)\n self.setAutoFillBackground(True)\n if not editable:\n self.box.setEnabled(False)\n else:\n self.box.setEnabled(True)\n\n self.box.activated.connect( self.smiley_changed )\n layout.addWidget(self.box)\n layout.addStretch()\n self.setLayout(layout)\n\n def get_value(self):\n position = self.box.currentIndex()\n return CustomEditor.get_value(self) or self.name_by_position[position]\n\n def set_enabled(self, editable=True):\n self.box.setEnabled(editable)\n\n @QtCore.pyqtSlot( int )\n def smiley_changed(self, _index ):\n self.editingFinished.emit()\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex( self.position_by_name[name] )\n",
"from PyQt4 import QtGui, QtCore\nfrom PyQt4.QtCore import Qt\nfrom customeditor import CustomEditor\nfrom camelot.view.art import Icon\ndefault_icon_names = ['face-angel', 'face-crying', 'face-devilish',\n 'face-glasses', 'face-grin', 'face-kiss', 'face-monkey', 'face-plain',\n 'face-sad', 'face-smile', 'face-smile-big', 'face-surprise', 'face-wink']\ndefault_icons = list((icon_name, Icon('tango/16x16/emotes/%s.png' %\n icon_name)) for icon_name in default_icon_names)\n\n\nclass SmileyEditor(CustomEditor):\n\n def __init__(self, parent, editable=True, icons=default_icons,\n field_name='icons', **kwargs):\n CustomEditor.__init__(self, parent)\n self.setObjectName(field_name)\n self.box = QtGui.QComboBox()\n self.box.setFrame(True)\n self.box.setEditable(False)\n self.name_by_position = {(0): None}\n self.position_by_name = {None: 0}\n self.box.addItem('')\n for i, (icon_name, icon) in enumerate(icons):\n self.name_by_position[i + 1] = icon_name\n self.position_by_name[icon_name] = i + 1\n self.box.addItem(icon.getQIcon(), '')\n self.box.setFixedHeight(self.get_height())\n self.setFocusPolicy(Qt.StrongFocus)\n layout = QtGui.QHBoxLayout(self)\n layout.setContentsMargins(0, 0, 0, 0)\n layout.setSpacing(0)\n self.setAutoFillBackground(True)\n if not editable:\n self.box.setEnabled(False)\n else:\n self.box.setEnabled(True)\n self.box.activated.connect(self.smiley_changed)\n layout.addWidget(self.box)\n layout.addStretch()\n self.setLayout(layout)\n\n def get_value(self):\n position = self.box.currentIndex()\n return CustomEditor.get_value(self) or self.name_by_position[position]\n\n def set_enabled(self, editable=True):\n self.box.setEnabled(editable)\n\n @QtCore.pyqtSlot(int)\n def smiley_changed(self, _index):\n self.editingFinished.emit()\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex(self.position_by_name[name])\n",
"<import token>\ndefault_icon_names = ['face-angel', 'face-crying', 'face-devilish',\n 'face-glasses', 'face-grin', 'face-kiss', 'face-monkey', 'face-plain',\n 'face-sad', 'face-smile', 'face-smile-big', 'face-surprise', 'face-wink']\ndefault_icons = list((icon_name, Icon('tango/16x16/emotes/%s.png' %\n icon_name)) for icon_name in default_icon_names)\n\n\nclass SmileyEditor(CustomEditor):\n\n def __init__(self, parent, editable=True, icons=default_icons,\n field_name='icons', **kwargs):\n CustomEditor.__init__(self, parent)\n self.setObjectName(field_name)\n self.box = QtGui.QComboBox()\n self.box.setFrame(True)\n self.box.setEditable(False)\n self.name_by_position = {(0): None}\n self.position_by_name = {None: 0}\n self.box.addItem('')\n for i, (icon_name, icon) in enumerate(icons):\n self.name_by_position[i + 1] = icon_name\n self.position_by_name[icon_name] = i + 1\n self.box.addItem(icon.getQIcon(), '')\n self.box.setFixedHeight(self.get_height())\n self.setFocusPolicy(Qt.StrongFocus)\n layout = QtGui.QHBoxLayout(self)\n layout.setContentsMargins(0, 0, 0, 0)\n layout.setSpacing(0)\n self.setAutoFillBackground(True)\n if not editable:\n self.box.setEnabled(False)\n else:\n self.box.setEnabled(True)\n self.box.activated.connect(self.smiley_changed)\n layout.addWidget(self.box)\n layout.addStretch()\n self.setLayout(layout)\n\n def get_value(self):\n position = self.box.currentIndex()\n return CustomEditor.get_value(self) or self.name_by_position[position]\n\n def set_enabled(self, editable=True):\n self.box.setEnabled(editable)\n\n @QtCore.pyqtSlot(int)\n def smiley_changed(self, _index):\n self.editingFinished.emit()\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex(self.position_by_name[name])\n",
"<import token>\n<assignment token>\n\n\nclass SmileyEditor(CustomEditor):\n\n def __init__(self, parent, editable=True, icons=default_icons,\n field_name='icons', **kwargs):\n CustomEditor.__init__(self, parent)\n self.setObjectName(field_name)\n self.box = QtGui.QComboBox()\n self.box.setFrame(True)\n self.box.setEditable(False)\n self.name_by_position = {(0): None}\n self.position_by_name = {None: 0}\n self.box.addItem('')\n for i, (icon_name, icon) in enumerate(icons):\n self.name_by_position[i + 1] = icon_name\n self.position_by_name[icon_name] = i + 1\n self.box.addItem(icon.getQIcon(), '')\n self.box.setFixedHeight(self.get_height())\n self.setFocusPolicy(Qt.StrongFocus)\n layout = QtGui.QHBoxLayout(self)\n layout.setContentsMargins(0, 0, 0, 0)\n layout.setSpacing(0)\n self.setAutoFillBackground(True)\n if not editable:\n self.box.setEnabled(False)\n else:\n self.box.setEnabled(True)\n self.box.activated.connect(self.smiley_changed)\n layout.addWidget(self.box)\n layout.addStretch()\n self.setLayout(layout)\n\n def get_value(self):\n position = self.box.currentIndex()\n return CustomEditor.get_value(self) or self.name_by_position[position]\n\n def set_enabled(self, editable=True):\n self.box.setEnabled(editable)\n\n @QtCore.pyqtSlot(int)\n def smiley_changed(self, _index):\n self.editingFinished.emit()\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex(self.position_by_name[name])\n",
"<import token>\n<assignment token>\n\n\nclass SmileyEditor(CustomEditor):\n\n def __init__(self, parent, editable=True, icons=default_icons,\n field_name='icons', **kwargs):\n CustomEditor.__init__(self, parent)\n self.setObjectName(field_name)\n self.box = QtGui.QComboBox()\n self.box.setFrame(True)\n self.box.setEditable(False)\n self.name_by_position = {(0): None}\n self.position_by_name = {None: 0}\n self.box.addItem('')\n for i, (icon_name, icon) in enumerate(icons):\n self.name_by_position[i + 1] = icon_name\n self.position_by_name[icon_name] = i + 1\n self.box.addItem(icon.getQIcon(), '')\n self.box.setFixedHeight(self.get_height())\n self.setFocusPolicy(Qt.StrongFocus)\n layout = QtGui.QHBoxLayout(self)\n layout.setContentsMargins(0, 0, 0, 0)\n layout.setSpacing(0)\n self.setAutoFillBackground(True)\n if not editable:\n self.box.setEnabled(False)\n else:\n self.box.setEnabled(True)\n self.box.activated.connect(self.smiley_changed)\n layout.addWidget(self.box)\n layout.addStretch()\n self.setLayout(layout)\n <function token>\n\n def set_enabled(self, editable=True):\n self.box.setEnabled(editable)\n\n @QtCore.pyqtSlot(int)\n def smiley_changed(self, _index):\n self.editingFinished.emit()\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex(self.position_by_name[name])\n",
"<import token>\n<assignment token>\n\n\nclass SmileyEditor(CustomEditor):\n\n def __init__(self, parent, editable=True, icons=default_icons,\n field_name='icons', **kwargs):\n CustomEditor.__init__(self, parent)\n self.setObjectName(field_name)\n self.box = QtGui.QComboBox()\n self.box.setFrame(True)\n self.box.setEditable(False)\n self.name_by_position = {(0): None}\n self.position_by_name = {None: 0}\n self.box.addItem('')\n for i, (icon_name, icon) in enumerate(icons):\n self.name_by_position[i + 1] = icon_name\n self.position_by_name[icon_name] = i + 1\n self.box.addItem(icon.getQIcon(), '')\n self.box.setFixedHeight(self.get_height())\n self.setFocusPolicy(Qt.StrongFocus)\n layout = QtGui.QHBoxLayout(self)\n layout.setContentsMargins(0, 0, 0, 0)\n layout.setSpacing(0)\n self.setAutoFillBackground(True)\n if not editable:\n self.box.setEnabled(False)\n else:\n self.box.setEnabled(True)\n self.box.activated.connect(self.smiley_changed)\n layout.addWidget(self.box)\n layout.addStretch()\n self.setLayout(layout)\n <function token>\n <function token>\n\n @QtCore.pyqtSlot(int)\n def smiley_changed(self, _index):\n self.editingFinished.emit()\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex(self.position_by_name[name])\n",
"<import token>\n<assignment token>\n\n\nclass SmileyEditor(CustomEditor):\n\n def __init__(self, parent, editable=True, icons=default_icons,\n field_name='icons', **kwargs):\n CustomEditor.__init__(self, parent)\n self.setObjectName(field_name)\n self.box = QtGui.QComboBox()\n self.box.setFrame(True)\n self.box.setEditable(False)\n self.name_by_position = {(0): None}\n self.position_by_name = {None: 0}\n self.box.addItem('')\n for i, (icon_name, icon) in enumerate(icons):\n self.name_by_position[i + 1] = icon_name\n self.position_by_name[icon_name] = i + 1\n self.box.addItem(icon.getQIcon(), '')\n self.box.setFixedHeight(self.get_height())\n self.setFocusPolicy(Qt.StrongFocus)\n layout = QtGui.QHBoxLayout(self)\n layout.setContentsMargins(0, 0, 0, 0)\n layout.setSpacing(0)\n self.setAutoFillBackground(True)\n if not editable:\n self.box.setEnabled(False)\n else:\n self.box.setEnabled(True)\n self.box.activated.connect(self.smiley_changed)\n layout.addWidget(self.box)\n layout.addStretch()\n self.setLayout(layout)\n <function token>\n <function token>\n <function token>\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex(self.position_by_name[name])\n",
"<import token>\n<assignment token>\n\n\nclass SmileyEditor(CustomEditor):\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_value(self, value):\n name = CustomEditor.set_value(self, value)\n self.box.setCurrentIndex(self.position_by_name[name])\n",
"<import token>\n<assignment token>\n\n\nclass SmileyEditor(CustomEditor):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<assignment token>\n<class token>\n"
] | false |
99,021 |
61c54f1fce4d5f8d0a0af6febc3d6a73dd230b01
|
import csv
from app.analysis.rules import dispatch_filter
from app.main.dao.delivery_sheet_dao import delivery_sheet_dao
from app.main.entity.delivery_item import DeliveryItem
from app.util.uuid_util import UUIDUtil
if __name__ == '__main__':
file = open('output.csv', 'r', encoding='utf-8')
reader = csv.reader(file)
total_items = []
for row in reader:
item = DeliveryItem()
item.delivery_item_no = UUIDUtil.create_id("di")
item.id = row[2]
item.spec = row[6]
item.product_type = row[7]
item.quantity = row[11]
if item.quantity == '':
item.quantity = 0
item.free_pcs = row[12]
if item.free_pcs == '':
item.free_pcs = 0
item.customer_id = row[5]
item.salesman_id = row[14]
item.weight = int(row[3])
item.create_time = row[9]
total_items.append(item)
item_dict = {}
for item in total_items:
item_dict.setdefault(item.id, []).append(item)
for items in item_dict.values():
while items:
sheets = dispatch_filter.filter(items)
if sheets:
for sheet in sheets:
sheet.delivery_no = UUIDUtil.create_id("ds")
sheet.customer_id = sheet.items[0].customer_id
sheet.salesman_id = sheet.items[0].salesman_id
sheet.create_time = sheet.items[0].create_time
sheet.weight = 0
for di in sheet.items:
di.delivery_item_no = UUIDUtil.create_id("di")
di.delivery_no = sheet.delivery_no
sheet.weight += di.weight
delivery_sheet_dao.batch_insert(sheets)
|
[
"import csv\n\nfrom app.analysis.rules import dispatch_filter\nfrom app.main.dao.delivery_sheet_dao import delivery_sheet_dao\nfrom app.main.entity.delivery_item import DeliveryItem\nfrom app.util.uuid_util import UUIDUtil\n\nif __name__ == '__main__':\n file = open('output.csv', 'r', encoding='utf-8')\n reader = csv.reader(file)\n total_items = []\n for row in reader:\n item = DeliveryItem()\n item.delivery_item_no = UUIDUtil.create_id(\"di\")\n item.id = row[2]\n item.spec = row[6]\n item.product_type = row[7]\n item.quantity = row[11]\n if item.quantity == '':\n item.quantity = 0\n item.free_pcs = row[12]\n if item.free_pcs == '':\n item.free_pcs = 0\n item.customer_id = row[5]\n item.salesman_id = row[14]\n item.weight = int(row[3])\n item.create_time = row[9]\n total_items.append(item)\n item_dict = {}\n for item in total_items:\n item_dict.setdefault(item.id, []).append(item)\n for items in item_dict.values():\n while items:\n sheets = dispatch_filter.filter(items)\n if sheets:\n for sheet in sheets:\n sheet.delivery_no = UUIDUtil.create_id(\"ds\")\n sheet.customer_id = sheet.items[0].customer_id\n sheet.salesman_id = sheet.items[0].salesman_id\n sheet.create_time = sheet.items[0].create_time\n sheet.weight = 0\n for di in sheet.items:\n di.delivery_item_no = UUIDUtil.create_id(\"di\")\n di.delivery_no = sheet.delivery_no\n sheet.weight += di.weight\n delivery_sheet_dao.batch_insert(sheets)",
"import csv\nfrom app.analysis.rules import dispatch_filter\nfrom app.main.dao.delivery_sheet_dao import delivery_sheet_dao\nfrom app.main.entity.delivery_item import DeliveryItem\nfrom app.util.uuid_util import UUIDUtil\nif __name__ == '__main__':\n file = open('output.csv', 'r', encoding='utf-8')\n reader = csv.reader(file)\n total_items = []\n for row in reader:\n item = DeliveryItem()\n item.delivery_item_no = UUIDUtil.create_id('di')\n item.id = row[2]\n item.spec = row[6]\n item.product_type = row[7]\n item.quantity = row[11]\n if item.quantity == '':\n item.quantity = 0\n item.free_pcs = row[12]\n if item.free_pcs == '':\n item.free_pcs = 0\n item.customer_id = row[5]\n item.salesman_id = row[14]\n item.weight = int(row[3])\n item.create_time = row[9]\n total_items.append(item)\n item_dict = {}\n for item in total_items:\n item_dict.setdefault(item.id, []).append(item)\n for items in item_dict.values():\n while items:\n sheets = dispatch_filter.filter(items)\n if sheets:\n for sheet in sheets:\n sheet.delivery_no = UUIDUtil.create_id('ds')\n sheet.customer_id = sheet.items[0].customer_id\n sheet.salesman_id = sheet.items[0].salesman_id\n sheet.create_time = sheet.items[0].create_time\n sheet.weight = 0\n for di in sheet.items:\n di.delivery_item_no = UUIDUtil.create_id('di')\n di.delivery_no = sheet.delivery_no\n sheet.weight += di.weight\n delivery_sheet_dao.batch_insert(sheets)\n",
"<import token>\nif __name__ == '__main__':\n file = open('output.csv', 'r', encoding='utf-8')\n reader = csv.reader(file)\n total_items = []\n for row in reader:\n item = DeliveryItem()\n item.delivery_item_no = UUIDUtil.create_id('di')\n item.id = row[2]\n item.spec = row[6]\n item.product_type = row[7]\n item.quantity = row[11]\n if item.quantity == '':\n item.quantity = 0\n item.free_pcs = row[12]\n if item.free_pcs == '':\n item.free_pcs = 0\n item.customer_id = row[5]\n item.salesman_id = row[14]\n item.weight = int(row[3])\n item.create_time = row[9]\n total_items.append(item)\n item_dict = {}\n for item in total_items:\n item_dict.setdefault(item.id, []).append(item)\n for items in item_dict.values():\n while items:\n sheets = dispatch_filter.filter(items)\n if sheets:\n for sheet in sheets:\n sheet.delivery_no = UUIDUtil.create_id('ds')\n sheet.customer_id = sheet.items[0].customer_id\n sheet.salesman_id = sheet.items[0].salesman_id\n sheet.create_time = sheet.items[0].create_time\n sheet.weight = 0\n for di in sheet.items:\n di.delivery_item_no = UUIDUtil.create_id('di')\n di.delivery_no = sheet.delivery_no\n sheet.weight += di.weight\n delivery_sheet_dao.batch_insert(sheets)\n",
"<import token>\n<code token>\n"
] | false |
99,022 |
d27792ef1aed67ad62d9aa9ba8c4b17e8911ec3e
|
from pieces import *
class Board:
"""
Klasa koja implementira strukturu table.
"""
def __init__(self, rows=20, cols=20):
self.rows = rows # broj redova
self.cols = cols # broj kolona
self.elems = ['.', # prazno polje
'bp', # crni pijun
'br', # crni top
'bn', # crni konj
'bb', # crni lovac
'bk', # crni kralj
'bq', # crna kraljica
'wp', # beli pijun
'wr', # beli top
'wn', # beli konj
'wb', # beli lovac
'wk', # beli kralj
'wq'] # beli kraljica
self.data = [['.'] * cols for _ in range(rows)]
self.previous_positions = [-7, -7]
#provera belih figura da li su koriscene u partiji
self.kralj_beli_koriscen = False
self.top_beli_levi_koriscen = False
self.top_beli_desni_koriscen = False
# provera crnih figura da li su koriscene u partiji
self.kralj_crni_koriscen = False
self.top_crni_levi_koriscen = False
self.top_crni_desni_koriscen = False
def load_from_file(self, file_path):
"""
Ucitavanje table iz fajla.
:param file_path: putanja fajla.
"""
board_f = open(file_path, 'r')
row = board_f.readline().strip('\n')
self.data = []
while row != '':
self.data.append(list(row.split()))
row = board_f.readline().strip('\n')
board_f.close()
def save_to_file(self, file_path):
"""
Snimanje table u fajl.
:param file_path: putanja fajla.
"""
if file_path:
f = open(file_path, 'w')
for row in range(self.rows):
f.write(''.join(self.data[row]) + '\n')
f.close()
def move_piece(self, from_row, from_col, to_row, to_col):
"""
Pomeranje figure.
:param from_row: prethodni red figure.
:param from_col: prethodna kolona figure.
:param to_row: novi red figure.
:param to_col: nova kolona figure.
"""
if to_row < len(self.data) and to_col < len(self.data[0]):
t = self.data[from_row][from_col]
self.data[from_row][from_col] = '.'
self.data[to_row][to_col] = t
if (from_row == 7 and from_col == 4):
self.kralj_beli_koriscen = True
elif (from_row == 7 and from_col == 7):
self.top_beli_desni_koriscen = True
elif (from_row == 7 and from_col == 0):
self.top_beli_levi_koriscen = True
elif (from_row == 0 and from_col == 4):
self.kralj_crni_koriscen = True
elif (from_row == 0 and from_col == 7):
self.top_crni_desni_koriscen = True
elif (from_row == 0 and from_col == 0):
self.top_crni_levi_koriscen = True
self.previous_positions = [to_row, to_col]
def clear(self):
"""
Ciscenje sadrzaja cele table.
"""
for row in range(self.rows):
for col in range(self.cols):
self.data[row][col] = '.'
def find_position(self, element):
"""
Pronalazenje specificnog elementa unutar table.
:param element: kod elementa.
:returns: tuple(int, int)
"""
for row in range(self.rows):
for col in range(self.cols):
if self.data[row][col] == element:
return row, col
return None, None
def determine_piece(self, row, col):
"""
Odredjivanje koja je figura na odredjenoj poziciji na tabli.
:param row: red.
:param col: kolona.
:return: objekat figure (implementacija klase Piece).
"""
elem = self.data[row][col]
if elem != '.':
side = elem[0] # da li je crni (b) ili beli (w)
piece = elem[1] # kod figure
if piece == 'p':
return Pawn(self, row, col, side)
# TODO: dodati za ostale figure
if piece == 'n':
return Knight(self,row,col,side)
if piece == 'b':
return Bishop(self,row,col,side)
if piece == 'r':
return Rook(self,row,col,side)
if piece == 'q':
return Queen(self,row,col,side)
if piece == 'k':
return King(self, row, col, side)
def rokadaM(self, color):
"""
Mala rokada kada pozicije menjaju kralj i top sa desne strane.
"""
if(color == 'w'):
self.data[7][5] = 'wr'
self.data[7][6] = 'wk'
self.data[7][4] = '.'
self.data[7][7] = '.'
self.kralj_beli_koriscen = True
self.previous_positions = [7, 6]
else:
self.data[0][5] = 'br'
self.data[0][6] = 'bk'
self.data[0][4] = '.'
self.data[0][7] = '.'
self.kralj_crni_koriscen = True
self.previous_positions = [0, 6]
def rokadaV(self, color):
"""
Velika rokada kada pozicije menjaju kralj i top sa leve strane.
"""
if(color == 'w'):
self.data[7][3] = 'wr'
self.data[7][2] = 'wk'
self.data[7][4] = '.'
self.data[7][0] = '.'
self.kralj_beli_koriscen = True
self.previous_positions = [7, 2]
else:
self.data[0][3] = 'br'
self.data[0][2] = 'bk'
self.data[0][4] = '.'
self.data[0][0] = '.'
self.kralj_crni_koriscen = True
self.previous_positions = [0, 2]
def en_passant(self, from_row, from_col, to_row, to_col):
"""
En passant
"""
t = self.data[from_row][from_col]
self.data[from_row][from_col] = '.'
self.data[to_row][to_col] = t
self.data[from_row][to_col] = '.'
def sah(self, side, king_position=None):
"""
Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja
"""
if king_position is None:
king_position = self.find_position(str(side) + 'k')
if side == 'w':
napadac = 'b'
else:
napadac = 'w'
for row in range(self.rows):
for col in range(self.cols):
if self.data[row][col] != '.' and (not self.data[row][col].startswith(side)) and self.data[row][
col] != napadac + 'k':
piece = self.determine_piece(row, col)
positions = piece.get_legal_moves()
if king_position in positions:
return True
return False
def napadnuta_pozicija(self, side,figure_postion):
"""
Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja
"""
if side == 'w':
napadac = 'b'
else:
napadac = 'w'
for row in range(self.rows):
for col in range(self.cols):
if self.data[row][col] != '.' and (not self.data[row][col].startswith(side)) and self.data[row][
col] != napadac + 'k':
piece = self.determine_piece(row, col)
positions = piece.get_legal_moves()
if figure_postion in positions:
return True
return False
|
[
"from pieces import *\n\nclass Board:\n \"\"\"\n Klasa koja implementira strukturu table.\n \"\"\"\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows # broj redova\n self.cols = cols # broj kolona\n self.elems = ['.', # prazno polje\n 'bp', # crni pijun\n 'br', # crni top\n 'bn', # crni konj\n 'bb', # crni lovac\n 'bk', # crni kralj\n 'bq', # crna kraljica\n 'wp', # beli pijun\n 'wr', # beli top\n 'wn', # beli konj\n 'wb', # beli lovac\n 'wk', # beli kralj\n 'wq'] # beli kraljica\n\n self.data = [['.'] * cols for _ in range(rows)]\n\n self.previous_positions = [-7, -7]\n\n #provera belih figura da li su koriscene u partiji\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n\n # provera crnih figura da li su koriscene u partiji\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n\n if (from_row == 7 and from_col == 4):\n self.kralj_beli_koriscen = True\n elif (from_row == 7 and from_col == 7):\n self.top_beli_desni_koriscen = True\n elif (from_row == 7 and from_col == 0):\n self.top_beli_levi_koriscen = True\n elif (from_row == 0 and from_col == 4):\n self.kralj_crni_koriscen = True\n elif (from_row == 0 and from_col == 7):\n self.top_crni_desni_koriscen = True\n elif (from_row == 0 and from_col == 0):\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n\n def clear(self):\n \"\"\"\n Ciscenje sadrzaja cele table.\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n self.data[row][col] = '.'\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0] # da li je crni (b) ili beli (w)\n piece = elem[1] # kod figure\n if piece == 'p':\n return Pawn(self, row, col, side)\n # TODO: dodati za ostale figure\n if piece == 'n':\n return Knight(self,row,col,side)\n if piece == 'b':\n return Bishop(self,row,col,side)\n if piece == 'r':\n return Rook(self,row,col,side)\n if piece == 'q':\n return Queen(self,row,col,side)\n if piece == 'k':\n return King(self, row, col, side)\n def rokadaM(self, color):\n \"\"\"\n Mala rokada kada pozicije menjaju kralj i top sa desne strane.\n \"\"\"\n if(color == 'w'):\n self.data[7][5] = 'wr'\n self.data[7][6] = 'wk'\n self.data[7][4] = '.'\n self.data[7][7] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 6]\n else:\n self.data[0][5] = 'br'\n self.data[0][6] = 'bk'\n self.data[0][4] = '.'\n self.data[0][7] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 6]\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if(color == 'w'):\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n\n\n\n def en_passant(self, from_row, from_col, to_row, to_col):\n \"\"\"\n En passant\n \"\"\"\n\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n self.data[from_row][to_col] = '.'\n\n\n\n\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n\n\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and (not self.data[row][col].startswith(side)) and self.data[row][\n col] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n\n if king_position in positions:\n return True\n\n return False\n\n\n\n\n def napadnuta_pozicija(self, side,figure_postion):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and (not self.data[row][col].startswith(side)) and self.data[row][\n col] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n\n if figure_postion in positions:\n return True\n return False\n\n\n\n\n\n",
"from pieces import *\n\n\nclass Board:\n \"\"\"\n Klasa koja implementira strukturu table.\n \"\"\"\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n\n def clear(self):\n \"\"\"\n Ciscenje sadrzaja cele table.\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n self.data[row][col] = '.'\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n\n def rokadaM(self, color):\n \"\"\"\n Mala rokada kada pozicije menjaju kralj i top sa desne strane.\n \"\"\"\n if color == 'w':\n self.data[7][5] = 'wr'\n self.data[7][6] = 'wk'\n self.data[7][4] = '.'\n self.data[7][7] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 6]\n else:\n self.data[0][5] = 'br'\n self.data[0][6] = 'bk'\n self.data[0][4] = '.'\n self.data[0][7] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 6]\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n\n def en_passant(self, from_row, from_col, to_row, to_col):\n \"\"\"\n En passant\n \"\"\"\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n self.data[from_row][to_col] = '.'\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n\n def napadnuta_pozicija(self, side, figure_postion):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if figure_postion in positions:\n return True\n return False\n",
"<import token>\n\n\nclass Board:\n \"\"\"\n Klasa koja implementira strukturu table.\n \"\"\"\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n\n def clear(self):\n \"\"\"\n Ciscenje sadrzaja cele table.\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n self.data[row][col] = '.'\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n\n def rokadaM(self, color):\n \"\"\"\n Mala rokada kada pozicije menjaju kralj i top sa desne strane.\n \"\"\"\n if color == 'w':\n self.data[7][5] = 'wr'\n self.data[7][6] = 'wk'\n self.data[7][4] = '.'\n self.data[7][7] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 6]\n else:\n self.data[0][5] = 'br'\n self.data[0][6] = 'bk'\n self.data[0][4] = '.'\n self.data[0][7] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 6]\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n\n def en_passant(self, from_row, from_col, to_row, to_col):\n \"\"\"\n En passant\n \"\"\"\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n self.data[from_row][to_col] = '.'\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n\n def napadnuta_pozicija(self, side, figure_postion):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if figure_postion in positions:\n return True\n return False\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n\n def clear(self):\n \"\"\"\n Ciscenje sadrzaja cele table.\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n self.data[row][col] = '.'\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n\n def rokadaM(self, color):\n \"\"\"\n Mala rokada kada pozicije menjaju kralj i top sa desne strane.\n \"\"\"\n if color == 'w':\n self.data[7][5] = 'wr'\n self.data[7][6] = 'wk'\n self.data[7][4] = '.'\n self.data[7][7] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 6]\n else:\n self.data[0][5] = 'br'\n self.data[0][6] = 'bk'\n self.data[0][4] = '.'\n self.data[0][7] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 6]\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n\n def en_passant(self, from_row, from_col, to_row, to_col):\n \"\"\"\n En passant\n \"\"\"\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n self.data[from_row][to_col] = '.'\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n\n def napadnuta_pozicija(self, side, figure_postion):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if figure_postion in positions:\n return True\n return False\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n <function token>\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n\n def rokadaM(self, color):\n \"\"\"\n Mala rokada kada pozicije menjaju kralj i top sa desne strane.\n \"\"\"\n if color == 'w':\n self.data[7][5] = 'wr'\n self.data[7][6] = 'wk'\n self.data[7][4] = '.'\n self.data[7][7] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 6]\n else:\n self.data[0][5] = 'br'\n self.data[0][6] = 'bk'\n self.data[0][4] = '.'\n self.data[0][7] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 6]\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n\n def en_passant(self, from_row, from_col, to_row, to_col):\n \"\"\"\n En passant\n \"\"\"\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n self.data[from_row][to_col] = '.'\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n\n def napadnuta_pozicija(self, side, figure_postion):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if figure_postion in positions:\n return True\n return False\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n <function token>\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n\n def rokadaM(self, color):\n \"\"\"\n Mala rokada kada pozicije menjaju kralj i top sa desne strane.\n \"\"\"\n if color == 'w':\n self.data[7][5] = 'wr'\n self.data[7][6] = 'wk'\n self.data[7][4] = '.'\n self.data[7][7] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 6]\n else:\n self.data[0][5] = 'br'\n self.data[0][6] = 'bk'\n self.data[0][4] = '.'\n self.data[0][7] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 6]\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n\n def en_passant(self, from_row, from_col, to_row, to_col):\n \"\"\"\n En passant\n \"\"\"\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n self.data[from_row][to_col] = '.'\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n <function token>\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n\n def rokadaM(self, color):\n \"\"\"\n Mala rokada kada pozicije menjaju kralj i top sa desne strane.\n \"\"\"\n if color == 'w':\n self.data[7][5] = 'wr'\n self.data[7][6] = 'wk'\n self.data[7][4] = '.'\n self.data[7][7] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 6]\n else:\n self.data[0][5] = 'br'\n self.data[0][6] = 'bk'\n self.data[0][4] = '.'\n self.data[0][7] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 6]\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n <function token>\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n\n def load_from_file(self, file_path):\n \"\"\"\n Ucitavanje table iz fajla.\n :param file_path: putanja fajla.\n \"\"\"\n board_f = open(file_path, 'r')\n row = board_f.readline().strip('\\n')\n self.data = []\n while row != '':\n self.data.append(list(row.split()))\n row = board_f.readline().strip('\\n')\n board_f.close()\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n <function token>\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n <function token>\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n <function token>\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n <function token>\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n\n def move_piece(self, from_row, from_col, to_row, to_col):\n \"\"\"\n Pomeranje figure.\n :param from_row: prethodni red figure.\n :param from_col: prethodna kolona figure.\n :param to_row: novi red figure.\n :param to_col: nova kolona figure.\n \"\"\"\n if to_row < len(self.data) and to_col < len(self.data[0]):\n t = self.data[from_row][from_col]\n self.data[from_row][from_col] = '.'\n self.data[to_row][to_col] = t\n if from_row == 7 and from_col == 4:\n self.kralj_beli_koriscen = True\n elif from_row == 7 and from_col == 7:\n self.top_beli_desni_koriscen = True\n elif from_row == 7 and from_col == 0:\n self.top_beli_levi_koriscen = True\n elif from_row == 0 and from_col == 4:\n self.kralj_crni_koriscen = True\n elif from_row == 0 and from_col == 7:\n self.top_crni_desni_koriscen = True\n elif from_row == 0 and from_col == 0:\n self.top_crni_levi_koriscen = True\n self.previous_positions = [to_row, to_col]\n <function token>\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n <function token>\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n <function token>\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n <function token>\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n <function token>\n <function token>\n\n def find_position(self, element):\n \"\"\"\n Pronalazenje specificnog elementa unutar table.\n :param element: kod elementa.\n :returns: tuple(int, int)\n \"\"\"\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] == element:\n return row, col\n return None, None\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n <function token>\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n <function token>\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n <function token>\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n <function token>\n <function token>\n <function token>\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n <function token>\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n <function token>\n\n def sah(self, side, king_position=None):\n \"\"\"\n Provera da li je napadnut kralj ako se moguce pozicije protivnika poklapaju sa pozicijom kralja\n \"\"\"\n if king_position is None:\n king_position = self.find_position(str(side) + 'k')\n if side == 'w':\n napadac = 'b'\n else:\n napadac = 'w'\n for row in range(self.rows):\n for col in range(self.cols):\n if self.data[row][col] != '.' and not self.data[row][col\n ].startswith(side) and self.data[row][col\n ] != napadac + 'k':\n piece = self.determine_piece(row, col)\n positions = piece.get_legal_moves()\n if king_position in positions:\n return True\n return False\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n <function token>\n\n def save_to_file(self, file_path):\n \"\"\"\n Snimanje table u fajl.\n :param file_path: putanja fajla.\n \"\"\"\n if file_path:\n f = open(file_path, 'w')\n for row in range(self.rows):\n f.write(''.join(self.data[row]) + '\\n')\n f.close()\n <function token>\n <function token>\n <function token>\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n <function token>\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n <function token>\n\n def rokadaV(self, color):\n \"\"\"\n Velika rokada kada pozicije menjaju kralj i top sa leve strane.\n \"\"\"\n if color == 'w':\n self.data[7][3] = 'wr'\n self.data[7][2] = 'wk'\n self.data[7][4] = '.'\n self.data[7][0] = '.'\n self.kralj_beli_koriscen = True\n self.previous_positions = [7, 2]\n else:\n self.data[0][3] = 'br'\n self.data[0][2] = 'bk'\n self.data[0][4] = '.'\n self.data[0][0] = '.'\n self.kralj_crni_koriscen = True\n self.previous_positions = [0, 2]\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def determine_piece(self, row, col):\n \"\"\"\n Odredjivanje koja je figura na odredjenoj poziciji na tabli.\n :param row: red.\n :param col: kolona.\n :return: objekat figure (implementacija klase Piece).\n \"\"\"\n elem = self.data[row][col]\n if elem != '.':\n side = elem[0]\n piece = elem[1]\n if piece == 'p':\n return Pawn(self, row, col, side)\n if piece == 'n':\n return Knight(self, row, col, side)\n if piece == 'b':\n return Bishop(self, row, col, side)\n if piece == 'r':\n return Rook(self, row, col, side)\n if piece == 'q':\n return Queen(self, row, col, side)\n if piece == 'k':\n return King(self, row, col, side)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n\n def __init__(self, rows=20, cols=20):\n self.rows = rows\n self.cols = cols\n self.elems = ['.', 'bp', 'br', 'bn', 'bb', 'bk', 'bq', 'wp', 'wr',\n 'wn', 'wb', 'wk', 'wq']\n self.data = [(['.'] * cols) for _ in range(rows)]\n self.previous_positions = [-7, -7]\n self.kralj_beli_koriscen = False\n self.top_beli_levi_koriscen = False\n self.top_beli_desni_koriscen = False\n self.kralj_crni_koriscen = False\n self.top_crni_levi_koriscen = False\n self.top_crni_desni_koriscen = False\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n\n\nclass Board:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,023 |
023b94ecc538c03c6b5279822c7e773f8cd80a39
|
from django.urls import path
from .import views
urlpatterns = [
path('payment', views.payment, name="payment")
]
|
[
"from django.urls import path\nfrom .import views\n\nurlpatterns = [\n path('payment', views.payment, name=\"payment\")\n]\n",
"from django.urls import path\nfrom . import views\nurlpatterns = [path('payment', views.payment, name='payment')]\n",
"<import token>\nurlpatterns = [path('payment', views.payment, name='payment')]\n",
"<import token>\n<assignment token>\n"
] | false |
99,024 |
0dd39cf1f28dbf6b1f154cbd37f9d0af442bf8bd
|
# list
persons = ['laosan', 'laosi', 'laowu']
# append,insert添加 pop删除
# persons.append('laoliu')
# print(persons)
# print(len(persons))
# print(persons[-3])
# 直接赋值
# persons[0] = 'laosana'
# print(persons)
# python 无泛型
# list = ['laosan', 123, True]
# # list 可以包含list 相当于二维数组
# list1 = ['laosan', 123, ['1', '2'], True]
# print(list1[2][1])
# print(list)
# tuple 区别与list tuple是有序的,声明后不可修改。
# tuple = ('laosan', 'laosi', 'laowu')
#
# print(tuple)
# tuple 的指向不变,但是tuple中的list可变 如:
t = (1, 2, 3, [4, 5])
print(t)
t[3][0] = 6
t[3][1] = 7
print(t)
|
[
"# list\npersons = ['laosan', 'laosi', 'laowu']\n\n# append,insert添加 pop删除\n# persons.append('laoliu')\n# print(persons)\n# print(len(persons))\n# print(persons[-3])\n\n# 直接赋值\n# persons[0] = 'laosana'\n\n# print(persons)\n\n# python 无泛型\n# list = ['laosan', 123, True]\n# # list 可以包含list 相当于二维数组\n# list1 = ['laosan', 123, ['1', '2'], True]\n# print(list1[2][1])\n# print(list)\n\n# tuple 区别与list tuple是有序的,声明后不可修改。\n# tuple = ('laosan', 'laosi', 'laowu')\n#\n# print(tuple)\n\n# tuple 的指向不变,但是tuple中的list可变 如:\nt = (1, 2, 3, [4, 5])\nprint(t)\nt[3][0] = 6\nt[3][1] = 7\nprint(t)\n\n\n",
"persons = ['laosan', 'laosi', 'laowu']\nt = 1, 2, 3, [4, 5]\nprint(t)\nt[3][0] = 6\nt[3][1] = 7\nprint(t)\n",
"<assignment token>\nprint(t)\n<assignment token>\nprint(t)\n",
"<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,025 |
b1264c005d7e311cb68687f923d0ae054b11ecb2
|
# %%
import csv
import warnings
import sklearn
import pandas as pd
import gc
from data_loading import *
from timbre_CNN import *
from evaluation import *
from torch.utils.data import DataLoader, sampler
from melody_loading import *
result_dir = "results"
model_dir = "models"
model_name = "_retrained"
val_interval = 5
perform_hyp_search = False
perform_cross_val = False
evaluation_bs = 256
#timbre_CNN_type = SingleNoteTimbreCNN
timbre_CNN_type = SingleNoteTimbreCNNSmall
#timbre_CNN_type = MelodyTimbreCNN
#timbre_CNN_type = MelodyTimbreCNNSmall
# Hyperparameters
hyperparams_single = {'batch_size': 64,
'epochs': 20,
'learning_rate': 0.002,
'loss_function': nn.BCELoss()}
hyperparams_melody = {"batch_size": 128, # GTX 1050 limits us to <512
"epochs": 25,
"learning_rate": 0.003,
"loss_function": nn.BCELoss()}
def generate_split_indices(data, partition_ratios=None, mode="mixed", seed=None):
# Make a random set of shuffled indices for sampling training/test sets randomly w/o overlap
if partition_ratios is None:
partition_ratios = [0.8, 0.1]
rng = np.random.default_rng(seed=seed)
if mode == "segment-instruments-random":
instruments = data.instrument.unique()
rng.shuffle(instruments)
i = 0
indices_train = []
indices_val = []
indices_test = []
no_more_instruments = False
# Iterate through instruments and add them to the training/validation set indices until ratios are reached
next_instrument_indices = np.asarray(data.instrument == instruments[i]).nonzero()[0]
while (len(indices_train) + len(next_instrument_indices))/len(data) <= partition_ratios[0]:
indices_train = np.append(indices_train, next_instrument_indices)
i += 1
if i >= len(instruments):
no_more_instruments = True
break
next_instrument_indices = np.asarray(data.instrument == instruments[i]).nonzero()[0]
while (len(indices_train) + len(indices_val) + len(next_instrument_indices))/len(data) \
<= partition_ratios[0] + partition_ratios[1] \
and not no_more_instruments:
indices_val = np.append(indices_val, next_instrument_indices)
i += 1
if i >= len(instruments):
break
next_instrument_indices = np.asarray(data.instrument == instruments[i]).nonzero()[0]
for j in range(i, len(instruments)):
indices_test = np.append(indices_test, np.asarray(data.instrument == instruments[j]).nonzero()[0])
np.random.shuffle(indices_train)
np.random.shuffle(indices_val)
np.random.shuffle(indices_test)
elif mode == "segment-instruments-random-balanced":
instruments_grand = data[data.label == 0].instrument.unique()
instruments_upright = data[data.label == 1].instrument.unique()
rng.shuffle(instruments_grand)
rng.shuffle(instruments_upright)
num_train_instruments = np.round(partition_ratios[0] * len(data.instrument.unique()))
num_val_instruments = np.round(partition_ratios[1] * len(data.instrument.unique()))
indices_train = []
indices_val = []
indices_test = []
i_grand = 0
i_upright = 0
for i in range(0, len(data.instrument.unique())):
if i % 2 and i_upright < len(instruments_upright):
next_instrument_indices = np.asarray(data.instrument == instruments_upright[i_upright]).nonzero()[0]
i_upright += 1
elif i_grand < len(instruments_grand):
next_instrument_indices = np.asarray(data.instrument == instruments_grand[i_grand]).nonzero()[0]
i_grand += 1
else:
break
if i < num_train_instruments:
indices_train = np.append(indices_train, next_instrument_indices)
elif i < num_train_instruments+num_val_instruments:
indices_val = np.append(indices_val, next_instrument_indices)
else:
indices_test = np.append(indices_test, next_instrument_indices)
if np.sum(partition_ratios) == 1: # Combine val and test sets if no test set required
indices_val = np.append(indices_val, indices_test)
indices_test = []
np.random.shuffle(indices_train)
np.random.shuffle(indices_val)
np.random.shuffle(indices_test)
elif mode == "segment-instruments-manual":
# train_instruments = ["AkPnBcht", "AkPnBsdf", "grand-closed", "grand-removed", "grand-open",
# "upright-open", "upright-semiopen", "upright-closed"]
# val_instruments = ["StbgTGd2", "AkPnCGdD", "ENSTDkCl"]
# test_instruments = ["AkPnStgb", "SptkBGAm", "ENSTDkAm"]
# train_instruments = ["Nord_BrightGrand-XL", "Nord_AmberUpright-XL",
# "Nord_ConcertGrand1Amb-Lrg", "Nord_BabyUpright-XL",
# "Nord_GrandImperial-XL", "Nord_BlackUpright-Lrg",
# "Nord_GrandLadyD-Lrg", "Nord_BlueSwede-Lrg",
# "Nord_RoyalGrand3D-XL", "Nord_MellowUpright-XL",
# "Nord_SilverGrand-XL", "Nord_QueenUpright-Lrg",
# "Nord_StudioGrand1-Lrg", "Nord_RainPiano-Lrg"]
# val_instruments = ["Nord_ItalianGrand-XL", "Nord_GrandUpright-XL",
# "Nord_StudioGrand2-Lrg"]
# test_instruments = ["Nord_VelvetGrand-XL", "Nord_RomanticUpright-Lrg",
# "Nord_WhiteGrand-XL", "Nord_SaloonUpright-Lrg",
# "Nord_ConcertGrand1-Lrg", "Nord_BambinoUpright-XL"]
train_instruments = ["Nord_BrightGrand-XL", "Nord_AmberUpright-XL",
"Nord_ConcertGrand1-Lrg", "Nord_BabyUpright-XL",
"Nord_GrandImperial-XL", "Nord_BlackUpright-Lrg",
"Nord_RoyalGrand3D-XL", "Nord_MellowUpright-XL",
"Nord_StudioGrand1-Lrg", "Nord_RainPiano-Lrg",
"Nord_WhiteGrand-XL", "Nord_RomanticUpright-Lrg",
"Nord_VelvetGrand-XL", "Nord_GrandUpright-XL",
"Nord_StudioGrand2-Lrg", "Nord_SaloonUpright-Lrg",
"Nord_ItalianGrand-XL", "Nord_BlueSwede-Lrg"]
val_instruments = ["Nord_ConcertGrand1Amb-Lrg", "Nord_BambinoUpright-XL",
"Nord_GrandLadyD-Lrg", "Nord_QueenUpright-Lrg",
"Nord_SilverGrand-XL"]
test_instruments = []
indices_train = np.asarray(data.instrument.isin(train_instruments)).nonzero()[0]
indices_val = np.asarray(data.instrument.isin(val_instruments)).nonzero()[0]
indices_test = np.asarray(data.instrument.isin(test_instruments)).nonzero()[0]
np.random.shuffle(indices_train)
np.random.shuffle(indices_val)
np.random.shuffle(indices_test)
elif mode == "segment-velocities":
indices_train = np.asarray(data.velocity == "M").nonzero()[0]
indices_val = np.asarray(data.velocity == "P").nonzero()[0]
indices_test = np.asarray(data.velocity == "F").nonzero()[0]
np.random.shuffle(indices_train)
np.random.shuffle(indices_val)
np.random.shuffle(indices_test)
elif mode == "mixed":
# Reproducible random shuffle of indices, using a fixed seed
indices = np.arange(len(data))
rng.shuffle(indices)
split_point_train = int(len(data) * partition_ratios[0])
split_point_val = split_point_train + int(len(data) * partition_ratios[1])
indices_train = indices[:split_point_train]
indices_val = indices[split_point_train:split_point_val]
indices_test = indices[split_point_val:]
else:
raise Exception("Mode not recognised")
# Print training, validation and test set statistics
print("")
indices_train = indices_train.astype(int)
indices_val = indices_val.astype(int)
print(len(indices_train), "training samples")
print(len(indices_val), "validation samples")
print(len(indices_test), "test samples")
train_class_balance = data.iloc[indices_train].label.sum(axis=0)/len(indices_train)
print("Train set contains", np.round(train_class_balance * 100), "% Upright pianos")
if mode == "segment_instruments":
print("\t", pd.unique(data.iloc[indices_train].instrument))
val_class_balance = data.iloc[indices_val].label.sum(axis=0)/len(indices_val)
print("Validation set contains", np.round(val_class_balance * 100), "% Upright pianos")
if mode == "segment_instruments":
print("\t", pd.unique(data.iloc[indices_val].instrument))
if len(indices_test) == 0:
indices_test = np.array([])
indices_test = indices_test.astype(int)
else:
indices_test = indices_test.astype(int)
test_class_balance = data.iloc[indices_test].label.sum(axis=0)/len(indices_test)
print("Test set contains", np.round(test_class_balance * 100), "% Upright pianos")
if mode == "segment_instruments":
print("\t", pd.unique(data.iloc[indices_test].instrument))
print("Overall, dataset contains", np.round(100 * data.label.sum(axis=0)/len(data)), "% Upright pianos")
return indices_train, indices_val, indices_test
def generate_crossval_fold_indices(data, seed=None, folds=5, verbose=True):
rng = np.random.default_rng(seed=seed)
instruments_grand = data[data.label == 0].instrument.unique()
instruments_upright = data[data.label == 1].instrument.unique()
rng.shuffle(instruments_grand)
rng.shuffle(instruments_upright)
num_instruments_fold1 = np.round(len(data.instrument.unique())/folds)
num_instruments_fold2 = np.round(len(data.instrument.unique())/folds)
num_instruments_fold3 = np.round(len(data.instrument.unique())/folds)
num_instruments_fold4 = np.round(len(data.instrument.unique())/folds)
indices_fold1 = []
indices_fold2 = []
indices_fold3 = []
indices_fold4 = []
indices_fold5 = []
i_grand = 0
i_upright = 0
if folds == 5:
for i in range(0, len(data.instrument.unique())):
if i % 2 and i_upright < len(instruments_upright):
next_instrument_indices = np.asarray(data.instrument == instruments_upright[i_upright]).nonzero()[0]
i_upright += 1
elif i_grand < len(instruments_grand):
next_instrument_indices = np.asarray(data.instrument == instruments_grand[i_grand]).nonzero()[0]
i_grand += 1
else:
break
if i < num_instruments_fold1:
indices_fold1 = np.append(indices_fold1, next_instrument_indices).astype(int)
elif i < num_instruments_fold1 + num_instruments_fold2:
indices_fold2 = np.append(indices_fold2, next_instrument_indices).astype(int)
elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:
indices_fold3 = np.append(indices_fold3, next_instrument_indices).astype(int)
elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3 + num_instruments_fold4:
indices_fold4 = np.append(indices_fold4, next_instrument_indices).astype(int)
else:
indices_fold5 = np.append(indices_fold5, next_instrument_indices).astype(int)
elif folds == 4:
for i in range(0, len(data.instrument.unique())):
if i % 2 and i_upright < len(instruments_upright):
next_instrument_indices = np.asarray(data.instrument == instruments_upright[i_upright]).nonzero()[0]
i_upright += 1
elif i_grand < len(instruments_grand):
next_instrument_indices = np.asarray(data.instrument == instruments_grand[i_grand]).nonzero()[0]
i_grand += 1
else:
break
if i < num_instruments_fold1:
indices_fold1 = np.append(indices_fold1, next_instrument_indices).astype(int)
elif i < num_instruments_fold1 + num_instruments_fold2:
indices_fold2 = np.append(indices_fold2, next_instrument_indices).astype(int)
elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:
indices_fold3 = np.append(indices_fold3, next_instrument_indices).astype(int)
else:
indices_fold4 = np.append(indices_fold4, next_instrument_indices).astype(int)
np.random.shuffle(indices_fold1)
np.random.shuffle(indices_fold2)
np.random.shuffle(indices_fold3)
np.random.shuffle(indices_fold4)
np.random.shuffle(indices_fold5)
if verbose:
print(len(indices_fold1), "samples in fold 1")
print("\t", pd.unique(data.iloc[indices_fold1].instrument))
print(len(indices_fold2), "samples in fold 2")
print("\t", pd.unique(data.iloc[indices_fold2].instrument))
print(len(indices_fold3), "samples in fold 3")
print("\t", pd.unique(data.iloc[indices_fold3].instrument))
print(len(indices_fold4), "samples in fold 4")
print("\t", pd.unique(data.iloc[indices_fold4].instrument))
if folds == 5:
print(len(indices_fold5), "samples in fold 5")
print("\t", pd.unique(data.iloc[indices_fold5].instrument))
return indices_fold1, indices_fold2, indices_fold3, indices_fold4, indices_fold5
def train_model(cnn_type, params, local_dataset, train_ind, val_loader=None, plot=True, plot_title="", verbose=True):
if verbose:
print("\n--------------TRAINING MODEL--------------")
print(timbre_CNN_type.__name__, "with parameters:")
print(params)
# Unpack the hyperparameters
batch_size = params["batch_size"]
epochs = params["epochs"]
learning_rate = params["learning_rate"]
loss_function = params["loss_function"]
loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle=False,
sampler=sampler.SubsetRandomSampler(train_ind),
pin_memory=True)
model = cnn_type().to(device, non_blocking=True)
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
with torch.enable_grad():
loss_train_log = []
loss_val_log = []
epoch_val_log = []
for epoch in range(epochs):
model.train()
running_loss = 0.0
for i, batch in enumerate(loader_train):
x = batch[0].float().to(device, non_blocking=True)
label = batch[1].float().to(device, non_blocking=True)
optimizer.zero_grad()
y = model(x)
loss = loss_function(y, label)
loss.backward()
optimizer.step()
running_loss += loss.detach()
gc.collect()
# Record training loss
mean_epoch_loss = (running_loss/(batch_size*(i+1))).item()
if verbose:
print("+Training - Epoch", epoch+1, "loss:", mean_epoch_loss)
loss_train_log.append(mean_epoch_loss)
# Calculate loss on validation set
if (epoch == epochs-1 or epoch % val_interval == 0) and val_loader is not None and plot:
loss_val = 0
model.eval()
with torch.no_grad():
for i, batch in enumerate(val_loader):
x = batch[0].float().to(device, non_blocking=True)
label = batch[1].float().to(device, non_blocking=True)
y = model(x)
loss_val += loss_function(y, label).detach()
gc.collect()
mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item()
print("\t+Validation - Epoch", epoch + 1, "loss:", mean_epoch_val_loss)
loss_val_log.append(mean_epoch_val_loss)
epoch_val_log.append(epoch+1)
# Plot training curves
fig = None
if plot:
fig = plt.figure()
plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')
if val_loader is not None:
plt.plot(epoch_val_log, loss_val_log, c='b', label='val')
plt.legend()
plt.xlabel('epoch')
plt.ylabel('loss')
plt.xticks(np.arange(1, epochs+1))
plt.grid()
plt.title("Loss curve over "+str(epochs)+" epochs of training - "+plot_title)
plt.tight_layout()
plt.show()
return model, fig
def evaluate_CNN(evaluated_model, test_set):
labels_total = np.empty(0, dtype=int)
preds_total = np.empty(0, dtype=int)
instruments_acc = np.empty(0, dtype=str)
# Inference mode
evaluated_model.eval()
with torch.no_grad():
evaluated_model = evaluated_model.to(device, non_blocking=True)
for batch in test_set:
x = batch[0].float().to(device, non_blocking=True)
label = batch[1].float().to(device, non_blocking=True)
y = evaluated_model(x)
#print("+Evaluating - Batch loss:", loss_function(y, label).item())
pred = torch.round(y)
# Accumulate per-batch ground truths, outputs and instrument names
labels_total = np.append(labels_total, label.cpu())
preds_total = np.append(preds_total, pred.cpu())
instruments_acc = np.append(instruments_acc, np.array(batch[2]))
# Calculate scores per instrument
per_inst_scores = pd.DataFrame()
for instrument in np.unique(instruments_acc):
instrument_mask = np.nonzero(instruments_acc == instrument)
# Ignore Confusion matrix, balanced accuracy and F1 score which are irrelevant here
instrument_scores = evaluate_scores(labels_total[instrument_mask], preds_total[instrument_mask])
piano_class = "Upright" if labels_total[instrument_mask][0] else "Grand"
per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(instrument_scores["Accuracy"],2),piano_class]],
index=pd.Index([instrument], name="Instrument"),
columns=["Accuracy", "Class"]))
# Calculate overall scores
overall_scores = evaluate_scores(labels_total, preds_total)
return overall_scores, per_inst_scores
def cross_validate(cnn_type, hyparams, cross_val_subset, cv_folds=2, partition_mode=None, plot_train_curves=True, verbose=True):
cv_dataset = TimbreDataset(cross_val_subset)
total_scores = pd.DataFrame()
if cv_folds == 2:
set_1, set_2, _ = generate_split_indices(cross_val_subset, partition_ratios=[0.5, 0.5], mode=partition_mode)
training_sets = [set_1, set_2]
validation_sets = [set_2, set_1]
elif cv_folds == 4:
fold1, fold2, fold3, fold4, _ = generate_crossval_fold_indices(cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)
training_sets = [np.concatenate([fold2, fold3, fold4]),
np.concatenate([fold3, fold4, fold1]),
np.concatenate([fold4, fold1, fold2]),
np.concatenate([fold1, fold2, fold3])]
validation_sets = [fold1, fold2, fold3, fold4]
elif cv_folds == 5:
fold1, fold2, fold3, fold4, fold5 = generate_crossval_fold_indices(cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)
training_sets = [np.concatenate([fold2, fold3, fold4, fold5]),
np.concatenate([fold3, fold4, fold5, fold1]),
np.concatenate([fold4, fold5, fold1, fold2]),
np.concatenate([fold5, fold1, fold2, fold3]),
np.concatenate([fold1, fold2, fold3, fold4])]
validation_sets = [fold1, fold2, fold3, fold4, fold5]
else:
raise Exception("CV mode "+str(cv_folds)+" not implemented")
for fold, (train_fold_indices, val_fold_indices) in enumerate(zip(training_sets, validation_sets)):
print("\n----------------CV FOLD "+str(fold+1)+"-----------------")
val_fold = DataLoader(cv_dataset, batch_size=evaluation_bs, shuffle=False,
sampler=sampler.SubsetRandomSampler(val_fold_indices), pin_memory=True)
model_fold, _ = train_model(cnn_type=cnn_type, params=hyparams,
local_dataset=cv_dataset, train_ind=train_fold_indices, val_loader=val_fold,
plot=plot_train_curves, plot_title="CV Fold "+str(fold+1), verbose=verbose)
scores_fold, per_inst_scores_fold = evaluate_CNN(model_fold, val_fold)
if verbose:
print("\n------Fold "+str(fold+1)+" validation set scores--------")
print(per_inst_scores_fold)
display_scores(scores_fold, plot_conf=False)
numeric_scores_fold = pd.DataFrame.from_dict({k: [v] for k, v in scores_fold.items() if k in ["Accuracy", "F1", "acc_grand", "acc_upright", "balanced_acc", "min_class_acc"]})
numeric_scores_fold["no_samples"] = len(val_fold_indices)
total_scores = total_scores.append(numeric_scores_fold)
# Calculate overall cross-validation statistics, weighted by the number of validation samples in each fold
weighted_mean_acc = (total_scores.Accuracy * total_scores.no_samples).sum() / total_scores.no_samples.sum()
weighted_mean_f1 = (total_scores.F1 * total_scores.no_samples).sum() / total_scores.no_samples.sum()
weighted_mean_acc_grand = (total_scores.acc_grand * total_scores.no_samples).sum() / total_scores.no_samples.sum()
weighted_mean_acc_upright = (total_scores.acc_upright * total_scores.no_samples).sum() / total_scores.no_samples.sum()
weighted_mean_bal_acc = (total_scores.balanced_acc * total_scores.no_samples).sum() / total_scores.no_samples.sum()
weighted_mean_min_class_acc = (total_scores.min_class_acc * total_scores.no_samples).sum() / total_scores.no_samples.sum()
weighted_std_acc = np.sqrt(np.cov(total_scores.Accuracy, fweights=total_scores.no_samples))
weighted_std_f1 = np.sqrt(np.cov(total_scores.F1, fweights=total_scores.no_samples))
weighted_std_acc_grand = np.sqrt(np.cov(total_scores.acc_grand, fweights=total_scores.no_samples))
weighted_std_acc_upright = np.sqrt(np.cov(total_scores.acc_upright, fweights=total_scores.no_samples))
weighted_std_bal_acc = np.sqrt(np.cov(total_scores.balanced_acc, fweights=total_scores.no_samples))
weighted_std_min_class_acc = np.sqrt(np.cov(total_scores.min_class_acc, fweights=total_scores.no_samples))
cv_scores_stats = pd.DataFrame({"mean": [weighted_mean_acc, weighted_mean_f1, weighted_mean_acc_grand, weighted_mean_acc_upright, weighted_mean_bal_acc, weighted_mean_min_class_acc],
"std": [weighted_std_acc, weighted_std_f1, weighted_std_acc_grand, weighted_std_acc_upright, weighted_std_bal_acc, weighted_std_min_class_acc]},
index=["Accuracy", "F1", "Grand class accuracy", "Upright class accuracy", "Balanced (macro-avg) accuracy", "Min per-class accuracy"])
return cv_scores_stats
def hyperparameter_search(cnn_type, training_dataset,
batch_size_space,
epochs_space,
lr_space,
loss_space=None):
if loss_space is None:
loss_space = [nn.BCELoss()]
hyp_search_csv = os.path.join(result_dir, cnn_type.__name__, "hyperparam_search.csv")
with open(hyp_search_csv, "a", newline="") as csvfile:
writer = csv.writer(csvfile)
writer.writerow(["----------New Hyperparameter search----------"])
writer.writerow(["Batch size", "Epochs", "Learning rate", "Loss function"])
total_combinations = len(loss_space)*len(lr_space)*len(epochs_space)*len(batch_size_space)
best_score = 0
best_params = None
best_stats = None
i = 0
for epochs_local in epochs_space:
for loss_function_local in loss_space:
for batch_size_local in batch_size_space:
for learning_rate_local in lr_space:
i += 1
print("\n------ Hyperparameter search combination", i, "of", total_combinations, "------")
print("Model type:", cnn_type.__name__)
hyperparams_local={"batch_size": batch_size_local,
"epochs": epochs_local,
"learning_rate": learning_rate_local,
"loss_function": loss_function_local}
print(hyperparams_local)
cv_results = cross_validate(cnn_type=cnn_type,
hyparams=hyperparams_local,
cross_val_subset=training_dataset,
cv_folds=4,
partition_mode="segment-instruments-random-balanced",
plot_train_curves=False,
verbose=False)
# Print the results to csv
with open(hyp_search_csv, "a", newline="") as csvfile:
writer = csv.writer(csvfile)
writer.writerow([batch_size_local, epochs_local, learning_rate_local, loss_function_local])
cv_results.to_csv(hyp_search_csv, mode="a")
# Update best score using the mean over the folds of the minimum single-class accuracy
min_class_acc_local = cv_results.loc["Min per-class accuracy", "mean"]
# Ensure that the best model achieves better-than-chance macro-avg accuracy, on average across the folds
bal_acc_local = cv_results.loc["Balanced (macro-avg) accuracy", "mean"]
if min_class_acc_local > best_score and bal_acc_local > 0.5:
best_params = hyperparams_local
best_score = min_class_acc_local
best_stats = cv_results
print("\n------New best performing combination------")
print(best_params)
print("with stats:")
print(best_stats.round(3))
return best_params, best_score, best_stats
if __name__ == '__main__':
# Configure CPU or GPU using CUDA if available
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print('Device:', device)
if torch.cuda.is_available():
print("GPU:", torch.cuda.get_device_name(0))
print("\n\n----------------------LOADING DATA-----------------------")
if timbre_CNN_type == SingleNoteTimbreCNN or timbre_CNN_type == SingleNoteTimbreCNNSmall:
hyperparams = hyperparams_single
loader = InstrumentLoader(data_dir, note_range=[48, 72], set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)
total_data = loader.preprocess(fmin=20, fmax=20000, n_mels=300, normalisation="statistics")
elif timbre_CNN_type == MelodyTimbreCNN or timbre_CNN_type == MelodyTimbreCNNSmall:
hyperparams = hyperparams_melody
loader = MelodyInstrumentLoader(data_dir, note_range=[48, 72], set_velocity=None, normalise_wavs=True, load_MIDIsampled=True) # Use reload_wavs=False to speed up dataloading if melspecs already generated
total_data = loader.preprocess_melodies(midi_dir, normalisation="statistics")
else:
raise Exception(str(timbre_CNN_type)+" doesn't exist")
# Split into seen and unseen subsets
data_seen = total_data[total_data.dataset == "MIDIsampled"]
data_unseen = total_data[total_data.dataset != "MIDIsampled"]
gc.collect()
if perform_hyp_search:
print("\n\n----------------HYPERPARAMETER SEARCH--------------------")
batch_size_space = [64, 128, 256]
epochs_space = [15, 20, 25]
lr_space = [0.001, 0.002, 0.003]
best_params, best_score, best_stats = hyperparameter_search(cnn_type=timbre_CNN_type, training_dataset=data_seen,
batch_size_space=batch_size_space,
epochs_space=epochs_space,
lr_space=lr_space)
print("\n---------------Hyperparameter search results---------------")
print("Model type:", timbre_CNN_type.__name__)
print("Search space:")
print("\tBatch sizes:", batch_size_space)
print("\tEpochs:", epochs_space)
print("\tLearning rates:", lr_space)
print("Best params", best_params)
print("Best score", best_score)
print("Best stats:")
print(best_stats)
if best_params is not None:
hyperparams = best_params
dataset_seen = TimbreDataset(data_seen)
train_indices, val_indices, _ = generate_split_indices(data_seen, partition_ratios=[0.8, 0.2],
mode="segment-instruments-manual")
if perform_cross_val:
print("\n\n---------------------CROSS-VALIDATION---------------------")
cv_results = cross_validate(cnn_type=timbre_CNN_type, hyparams=hyperparams,
cross_val_subset=data_seen, #data_seen.iloc[train_indices],
cv_folds=4,
partition_mode="segment-instruments-random-balanced")
print("\n-------Overall cross-validation scores-------")
print(cv_results.round(3))
print("\n\n-------------------RE-TRAINED MODEL-----------------------")
loader_val = DataLoader(dataset_seen, batch_size=evaluation_bs, shuffle=False,
sampler=sampler.SubsetRandomSampler(val_indices),
pin_memory=True)
model_filename = "model_"+str(hyperparams["batch_size"])+"_"+str(hyperparams["epochs"])+"_"+str(hyperparams["learning_rate"])+model_name
saved_model_path = os.path.join(model_dir, timbre_CNN_type.__name__, model_filename+".pth")
if not os.path.isfile(saved_model_path):
print("\nCreating and training new model")
model, loss_plot = train_model(cnn_type=timbre_CNN_type, params=hyperparams,
local_dataset=dataset_seen, train_ind=train_indices, val_loader=loader_val,
plot_title="\n"+timbre_CNN_type.__name__)
# Save model
torch.save(model, saved_model_path)
print("Saved trained model to", saved_model_path)
# Save loss plot
loss_plot.savefig(os.path.join(model_dir, timbre_CNN_type.__name__, model_filename+".svg"))
else:
print("\nLoading pre-trained model from", saved_model_path)
model = torch.load(saved_model_path)
print(model)
model.count_parameters()
# print("\n\n-------------Evaluation on the validation set-------------")
# scores_seen, per_inst_scores_seen = evaluate_CNN(model, loader_val)
# print("---------Per-instrument scores---------")
# print(per_inst_scores_seen)
# #per_inst_scores_seen.to_csv(os.path.join(result_dir, timbre_CNN_type.__name__, model_filename + ".csv"))
# print("---Overall validation set performance---")
# display_scores(scores_seen, "Validation set")
print("\n\n--------------Evaluation on the unseen set---------------")
dataset_unseen = TimbreDataset(data_unseen)
loader_unseen = DataLoader(dataset_unseen, batch_size=evaluation_bs, shuffle=False, pin_memory=True)
scores_unseen, per_inst_scores_unseen = evaluate_CNN(model, loader_unseen)
print("---------Per-instrument scores---------")
print(per_inst_scores_unseen)
per_inst_scores_unseen.to_csv(os.path.join(result_dir, timbre_CNN_type.__name__, model_filename + ".csv"), mode="a")
print("--------Overall unseen set performance--------")
display_scores(scores_unseen, "Unseen test set\n"+timbre_CNN_type.__name__)
|
[
"# %%\nimport csv\nimport warnings\nimport sklearn\nimport pandas as pd\nimport gc\n\nfrom data_loading import *\nfrom timbre_CNN import *\nfrom evaluation import *\nfrom torch.utils.data import DataLoader, sampler\nfrom melody_loading import *\n\nresult_dir = \"results\"\nmodel_dir = \"models\"\nmodel_name = \"_retrained\"\nval_interval = 5\nperform_hyp_search = False\nperform_cross_val = False\nevaluation_bs = 256\n\n#timbre_CNN_type = SingleNoteTimbreCNN\ntimbre_CNN_type = SingleNoteTimbreCNNSmall\n#timbre_CNN_type = MelodyTimbreCNN\n#timbre_CNN_type = MelodyTimbreCNNSmall\n\n# Hyperparameters\nhyperparams_single = {'batch_size': 64,\n 'epochs': 20,\n 'learning_rate': 0.002,\n 'loss_function': nn.BCELoss()}\n\nhyperparams_melody = {\"batch_size\": 128, # GTX 1050 limits us to <512\n \"epochs\": 25,\n \"learning_rate\": 0.003,\n \"loss_function\": nn.BCELoss()}\n\n\ndef generate_split_indices(data, partition_ratios=None, mode=\"mixed\", seed=None):\n # Make a random set of shuffled indices for sampling training/test sets randomly w/o overlap\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == \"segment-instruments-random\":\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n # Iterate through instruments and add them to the training/validation set indices until ratios are reached\n next_instrument_indices = np.asarray(data.instrument == instruments[i]).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices))/len(data) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument == instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(next_instrument_indices))/len(data) \\\n <= partition_ratios[0] + partition_ratios[1] \\\n and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument == instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n\n elif mode == \"segment-instruments-random-balanced\":\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument == instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument == instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train, next_instrument_indices)\n elif i < num_train_instruments+num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1: # Combine val and test sets if no test set required\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n\n elif mode == \"segment-instruments-manual\":\n # train_instruments = [\"AkPnBcht\", \"AkPnBsdf\", \"grand-closed\", \"grand-removed\", \"grand-open\",\n # \"upright-open\", \"upright-semiopen\", \"upright-closed\"]\n # val_instruments = [\"StbgTGd2\", \"AkPnCGdD\", \"ENSTDkCl\"]\n # test_instruments = [\"AkPnStgb\", \"SptkBGAm\", \"ENSTDkAm\"]\n # train_instruments = [\"Nord_BrightGrand-XL\", \"Nord_AmberUpright-XL\",\n # \"Nord_ConcertGrand1Amb-Lrg\", \"Nord_BabyUpright-XL\",\n # \"Nord_GrandImperial-XL\", \"Nord_BlackUpright-Lrg\",\n # \"Nord_GrandLadyD-Lrg\", \"Nord_BlueSwede-Lrg\",\n # \"Nord_RoyalGrand3D-XL\", \"Nord_MellowUpright-XL\",\n # \"Nord_SilverGrand-XL\", \"Nord_QueenUpright-Lrg\",\n # \"Nord_StudioGrand1-Lrg\", \"Nord_RainPiano-Lrg\"]\n # val_instruments = [\"Nord_ItalianGrand-XL\", \"Nord_GrandUpright-XL\",\n # \"Nord_StudioGrand2-Lrg\"]\n # test_instruments = [\"Nord_VelvetGrand-XL\", \"Nord_RomanticUpright-Lrg\",\n # \"Nord_WhiteGrand-XL\", \"Nord_SaloonUpright-Lrg\",\n # \"Nord_ConcertGrand1-Lrg\", \"Nord_BambinoUpright-XL\"]\n train_instruments = [\"Nord_BrightGrand-XL\", \"Nord_AmberUpright-XL\",\n \"Nord_ConcertGrand1-Lrg\", \"Nord_BabyUpright-XL\",\n \"Nord_GrandImperial-XL\", \"Nord_BlackUpright-Lrg\",\n \"Nord_RoyalGrand3D-XL\", \"Nord_MellowUpright-XL\",\n \"Nord_StudioGrand1-Lrg\", \"Nord_RainPiano-Lrg\",\n \"Nord_WhiteGrand-XL\", \"Nord_RomanticUpright-Lrg\",\n \"Nord_VelvetGrand-XL\", \"Nord_GrandUpright-XL\",\n \"Nord_StudioGrand2-Lrg\", \"Nord_SaloonUpright-Lrg\",\n \"Nord_ItalianGrand-XL\", \"Nord_BlueSwede-Lrg\"]\n val_instruments = [\"Nord_ConcertGrand1Amb-Lrg\", \"Nord_BambinoUpright-XL\",\n \"Nord_GrandLadyD-Lrg\", \"Nord_QueenUpright-Lrg\",\n \"Nord_SilverGrand-XL\"]\n test_instruments = []\n\n indices_train = np.asarray(data.instrument.isin(train_instruments)).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n\n elif mode == \"segment-velocities\":\n indices_train = np.asarray(data.velocity == \"M\").nonzero()[0]\n indices_val = np.asarray(data.velocity == \"P\").nonzero()[0]\n indices_test = np.asarray(data.velocity == \"F\").nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == \"mixed\":\n # Reproducible random shuffle of indices, using a fixed seed\n indices = np.arange(len(data))\n rng.shuffle(indices)\n\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) * partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n\n else:\n raise Exception(\"Mode not recognised\")\n\n # Print training, validation and test set statistics\n print(\"\")\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), \"training samples\")\n print(len(indices_val), \"validation samples\")\n print(len(indices_test), \"test samples\")\n train_class_balance = data.iloc[indices_train].label.sum(axis=0)/len(indices_train)\n print(\"Train set contains\", np.round(train_class_balance * 100), \"% Upright pianos\")\n if mode == \"segment_instruments\":\n print(\"\\t\", pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0)/len(indices_val)\n print(\"Validation set contains\", np.round(val_class_balance * 100), \"% Upright pianos\")\n if mode == \"segment_instruments\":\n print(\"\\t\", pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0)/len(indices_test)\n print(\"Test set contains\", np.round(test_class_balance * 100), \"% Upright pianos\")\n if mode == \"segment_instruments\":\n print(\"\\t\", pd.unique(data.iloc[indices_test].instrument))\n print(\"Overall, dataset contains\", np.round(100 * data.label.sum(axis=0)/len(data)), \"% Upright pianos\")\n return indices_train, indices_val, indices_test\n\n\ndef generate_crossval_fold_indices(data, seed=None, folds=5, verbose=True):\n rng = np.random.default_rng(seed=seed)\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_instruments_fold1 = np.round(len(data.instrument.unique())/folds)\n num_instruments_fold2 = np.round(len(data.instrument.unique())/folds)\n num_instruments_fold3 = np.round(len(data.instrument.unique())/folds)\n num_instruments_fold4 = np.round(len(data.instrument.unique())/folds)\n indices_fold1 = []\n indices_fold2 = []\n indices_fold3 = []\n indices_fold4 = []\n indices_fold5 = []\n i_grand = 0\n i_upright = 0\n if folds == 5:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument == instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument == instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1, next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2, next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3, next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3 + num_instruments_fold4:\n indices_fold4 = np.append(indices_fold4, next_instrument_indices).astype(int)\n else:\n indices_fold5 = np.append(indices_fold5, next_instrument_indices).astype(int)\n elif folds == 4:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument == instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument == instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1, next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2, next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3, next_instrument_indices).astype(int)\n else:\n indices_fold4 = np.append(indices_fold4, next_instrument_indices).astype(int)\n np.random.shuffle(indices_fold1)\n np.random.shuffle(indices_fold2)\n np.random.shuffle(indices_fold3)\n np.random.shuffle(indices_fold4)\n np.random.shuffle(indices_fold5)\n if verbose:\n print(len(indices_fold1), \"samples in fold 1\")\n print(\"\\t\", pd.unique(data.iloc[indices_fold1].instrument))\n print(len(indices_fold2), \"samples in fold 2\")\n print(\"\\t\", pd.unique(data.iloc[indices_fold2].instrument))\n print(len(indices_fold3), \"samples in fold 3\")\n print(\"\\t\", pd.unique(data.iloc[indices_fold3].instrument))\n print(len(indices_fold4), \"samples in fold 4\")\n print(\"\\t\", pd.unique(data.iloc[indices_fold4].instrument))\n if folds == 5:\n print(len(indices_fold5), \"samples in fold 5\")\n print(\"\\t\", pd.unique(data.iloc[indices_fold5].instrument))\n\n return indices_fold1, indices_fold2, indices_fold3, indices_fold4, indices_fold5\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None, plot=True, plot_title=\"\", verbose=True):\n if verbose:\n print(\"\\n--------------TRAINING MODEL--------------\")\n print(timbre_CNN_type.__name__, \"with parameters:\")\n print(params)\n # Unpack the hyperparameters\n batch_size = params[\"batch_size\"]\n epochs = params[\"epochs\"]\n learning_rate = params[\"learning_rate\"]\n loss_function = params[\"loss_function\"]\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle=False,\n sampler=sampler.SubsetRandomSampler(train_ind),\n pin_memory=True)\n\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n # Record training loss\n mean_epoch_loss = (running_loss/(batch_size*(i+1))).item()\n if verbose:\n print(\"+Training - Epoch\", epoch+1, \"loss:\", mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n\n # Calculate loss on validation set\n if (epoch == epochs-1 or epoch % val_interval == 0) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item()\n print(\"\\t+Validation - Epoch\", epoch + 1, \"loss:\", mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch+1)\n\n # Plot training curves\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs+1))\n plt.grid()\n plt.title(\"Loss curve over \"+str(epochs)+\" epochs of training - \"+plot_title)\n plt.tight_layout()\n plt.show()\n\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n # Inference mode\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n #print(\"+Evaluating - Batch loss:\", loss_function(y, label).item())\n pred = torch.round(y)\n # Accumulate per-batch ground truths, outputs and instrument names\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n # Calculate scores per instrument\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n # Ignore Confusion matrix, balanced accuracy and F1 score which are irrelevant here\n instrument_scores = evaluate_scores(labels_total[instrument_mask], preds_total[instrument_mask])\n piano_class = \"Upright\" if labels_total[instrument_mask][0] else \"Grand\"\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(instrument_scores[\"Accuracy\"],2),piano_class]],\n index=pd.Index([instrument], name=\"Instrument\"),\n columns=[\"Accuracy\", \"Class\"]))\n # Calculate overall scores\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\ndef cross_validate(cnn_type, hyparams, cross_val_subset, cv_folds=2, partition_mode=None, plot_train_curves=True, verbose=True):\n\n cv_dataset = TimbreDataset(cross_val_subset)\n total_scores = pd.DataFrame()\n\n if cv_folds == 2:\n set_1, set_2, _ = generate_split_indices(cross_val_subset, partition_ratios=[0.5, 0.5], mode=partition_mode)\n training_sets = [set_1, set_2]\n validation_sets = [set_2, set_1]\n elif cv_folds == 4:\n fold1, fold2, fold3, fold4, _ = generate_crossval_fold_indices(cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4]),\n np.concatenate([fold3, fold4, fold1]),\n np.concatenate([fold4, fold1, fold2]),\n np.concatenate([fold1, fold2, fold3])]\n validation_sets = [fold1, fold2, fold3, fold4]\n elif cv_folds == 5:\n fold1, fold2, fold3, fold4, fold5 = generate_crossval_fold_indices(cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4, fold5]),\n np.concatenate([fold3, fold4, fold5, fold1]),\n np.concatenate([fold4, fold5, fold1, fold2]),\n np.concatenate([fold5, fold1, fold2, fold3]),\n np.concatenate([fold1, fold2, fold3, fold4])]\n validation_sets = [fold1, fold2, fold3, fold4, fold5]\n else:\n raise Exception(\"CV mode \"+str(cv_folds)+\" not implemented\")\n\n for fold, (train_fold_indices, val_fold_indices) in enumerate(zip(training_sets, validation_sets)):\n print(\"\\n----------------CV FOLD \"+str(fold+1)+\"-----------------\")\n val_fold = DataLoader(cv_dataset, batch_size=evaluation_bs, shuffle=False,\n sampler=sampler.SubsetRandomSampler(val_fold_indices), pin_memory=True)\n model_fold, _ = train_model(cnn_type=cnn_type, params=hyparams,\n local_dataset=cv_dataset, train_ind=train_fold_indices, val_loader=val_fold,\n plot=plot_train_curves, plot_title=\"CV Fold \"+str(fold+1), verbose=verbose)\n scores_fold, per_inst_scores_fold = evaluate_CNN(model_fold, val_fold)\n if verbose:\n print(\"\\n------Fold \"+str(fold+1)+\" validation set scores--------\")\n print(per_inst_scores_fold)\n display_scores(scores_fold, plot_conf=False)\n numeric_scores_fold = pd.DataFrame.from_dict({k: [v] for k, v in scores_fold.items() if k in [\"Accuracy\", \"F1\", \"acc_grand\", \"acc_upright\", \"balanced_acc\", \"min_class_acc\"]})\n numeric_scores_fold[\"no_samples\"] = len(val_fold_indices)\n total_scores = total_scores.append(numeric_scores_fold)\n # Calculate overall cross-validation statistics, weighted by the number of validation samples in each fold\n weighted_mean_acc = (total_scores.Accuracy * total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_f1 = (total_scores.F1 * total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_acc_grand = (total_scores.acc_grand * total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_acc_upright = (total_scores.acc_upright * total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_bal_acc = (total_scores.balanced_acc * total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_min_class_acc = (total_scores.min_class_acc * total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_std_acc = np.sqrt(np.cov(total_scores.Accuracy, fweights=total_scores.no_samples))\n weighted_std_f1 = np.sqrt(np.cov(total_scores.F1, fweights=total_scores.no_samples))\n weighted_std_acc_grand = np.sqrt(np.cov(total_scores.acc_grand, fweights=total_scores.no_samples))\n weighted_std_acc_upright = np.sqrt(np.cov(total_scores.acc_upright, fweights=total_scores.no_samples))\n weighted_std_bal_acc = np.sqrt(np.cov(total_scores.balanced_acc, fweights=total_scores.no_samples))\n weighted_std_min_class_acc = np.sqrt(np.cov(total_scores.min_class_acc, fweights=total_scores.no_samples))\n cv_scores_stats = pd.DataFrame({\"mean\": [weighted_mean_acc, weighted_mean_f1, weighted_mean_acc_grand, weighted_mean_acc_upright, weighted_mean_bal_acc, weighted_mean_min_class_acc],\n \"std\": [weighted_std_acc, weighted_std_f1, weighted_std_acc_grand, weighted_std_acc_upright, weighted_std_bal_acc, weighted_std_min_class_acc]},\n index=[\"Accuracy\", \"F1\", \"Grand class accuracy\", \"Upright class accuracy\", \"Balanced (macro-avg) accuracy\", \"Min per-class accuracy\"])\n return cv_scores_stats\n\n\ndef hyperparameter_search(cnn_type, training_dataset,\n batch_size_space,\n epochs_space,\n lr_space,\n loss_space=None):\n if loss_space is None:\n loss_space = [nn.BCELoss()]\n\n hyp_search_csv = os.path.join(result_dir, cnn_type.__name__, \"hyperparam_search.csv\")\n with open(hyp_search_csv, \"a\", newline=\"\") as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([\"----------New Hyperparameter search----------\"])\n writer.writerow([\"Batch size\", \"Epochs\", \"Learning rate\", \"Loss function\"])\n\n total_combinations = len(loss_space)*len(lr_space)*len(epochs_space)*len(batch_size_space)\n best_score = 0\n best_params = None\n best_stats = None\n i = 0\n\n for epochs_local in epochs_space:\n for loss_function_local in loss_space:\n for batch_size_local in batch_size_space:\n for learning_rate_local in lr_space:\n i += 1\n print(\"\\n------ Hyperparameter search combination\", i, \"of\", total_combinations, \"------\")\n print(\"Model type:\", cnn_type.__name__)\n hyperparams_local={\"batch_size\": batch_size_local,\n \"epochs\": epochs_local,\n \"learning_rate\": learning_rate_local,\n \"loss_function\": loss_function_local}\n print(hyperparams_local)\n cv_results = cross_validate(cnn_type=cnn_type,\n hyparams=hyperparams_local,\n cross_val_subset=training_dataset,\n cv_folds=4,\n partition_mode=\"segment-instruments-random-balanced\",\n plot_train_curves=False,\n verbose=False)\n # Print the results to csv\n with open(hyp_search_csv, \"a\", newline=\"\") as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([batch_size_local, epochs_local, learning_rate_local, loss_function_local])\n cv_results.to_csv(hyp_search_csv, mode=\"a\")\n # Update best score using the mean over the folds of the minimum single-class accuracy\n min_class_acc_local = cv_results.loc[\"Min per-class accuracy\", \"mean\"]\n # Ensure that the best model achieves better-than-chance macro-avg accuracy, on average across the folds\n bal_acc_local = cv_results.loc[\"Balanced (macro-avg) accuracy\", \"mean\"]\n if min_class_acc_local > best_score and bal_acc_local > 0.5:\n best_params = hyperparams_local\n best_score = min_class_acc_local\n best_stats = cv_results\n print(\"\\n------New best performing combination------\")\n print(best_params)\n print(\"with stats:\")\n print(best_stats.round(3))\n\n return best_params, best_score, best_stats\n\n\nif __name__ == '__main__':\n # Configure CPU or GPU using CUDA if available\n device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n print('Device:', device)\n if torch.cuda.is_available():\n print(\"GPU:\", torch.cuda.get_device_name(0))\n\n print(\"\\n\\n----------------------LOADING DATA-----------------------\")\n if timbre_CNN_type == SingleNoteTimbreCNN or timbre_CNN_type == SingleNoteTimbreCNNSmall:\n hyperparams = hyperparams_single\n loader = InstrumentLoader(data_dir, note_range=[48, 72], set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)\n total_data = loader.preprocess(fmin=20, fmax=20000, n_mels=300, normalisation=\"statistics\")\n elif timbre_CNN_type == MelodyTimbreCNN or timbre_CNN_type == MelodyTimbreCNNSmall:\n hyperparams = hyperparams_melody\n loader = MelodyInstrumentLoader(data_dir, note_range=[48, 72], set_velocity=None, normalise_wavs=True, load_MIDIsampled=True) # Use reload_wavs=False to speed up dataloading if melspecs already generated\n total_data = loader.preprocess_melodies(midi_dir, normalisation=\"statistics\")\n else:\n raise Exception(str(timbre_CNN_type)+\" doesn't exist\")\n # Split into seen and unseen subsets\n data_seen = total_data[total_data.dataset == \"MIDIsampled\"]\n data_unseen = total_data[total_data.dataset != \"MIDIsampled\"]\n gc.collect()\n\n if perform_hyp_search:\n print(\"\\n\\n----------------HYPERPARAMETER SEARCH--------------------\")\n batch_size_space = [64, 128, 256]\n epochs_space = [15, 20, 25]\n lr_space = [0.001, 0.002, 0.003]\n best_params, best_score, best_stats = hyperparameter_search(cnn_type=timbre_CNN_type, training_dataset=data_seen,\n batch_size_space=batch_size_space,\n epochs_space=epochs_space,\n lr_space=lr_space)\n print(\"\\n---------------Hyperparameter search results---------------\")\n print(\"Model type:\", timbre_CNN_type.__name__)\n print(\"Search space:\")\n print(\"\\tBatch sizes:\", batch_size_space)\n print(\"\\tEpochs:\", epochs_space)\n print(\"\\tLearning rates:\", lr_space)\n print(\"Best params\", best_params)\n print(\"Best score\", best_score)\n print(\"Best stats:\")\n print(best_stats)\n if best_params is not None:\n hyperparams = best_params\n\n dataset_seen = TimbreDataset(data_seen)\n train_indices, val_indices, _ = generate_split_indices(data_seen, partition_ratios=[0.8, 0.2],\n mode=\"segment-instruments-manual\")\n if perform_cross_val:\n print(\"\\n\\n---------------------CROSS-VALIDATION---------------------\")\n cv_results = cross_validate(cnn_type=timbre_CNN_type, hyparams=hyperparams,\n cross_val_subset=data_seen, #data_seen.iloc[train_indices],\n cv_folds=4,\n partition_mode=\"segment-instruments-random-balanced\")\n print(\"\\n-------Overall cross-validation scores-------\")\n print(cv_results.round(3))\n\n print(\"\\n\\n-------------------RE-TRAINED MODEL-----------------------\")\n loader_val = DataLoader(dataset_seen, batch_size=evaluation_bs, shuffle=False,\n sampler=sampler.SubsetRandomSampler(val_indices),\n pin_memory=True)\n model_filename = \"model_\"+str(hyperparams[\"batch_size\"])+\"_\"+str(hyperparams[\"epochs\"])+\"_\"+str(hyperparams[\"learning_rate\"])+model_name\n saved_model_path = os.path.join(model_dir, timbre_CNN_type.__name__, model_filename+\".pth\")\n if not os.path.isfile(saved_model_path):\n print(\"\\nCreating and training new model\")\n model, loss_plot = train_model(cnn_type=timbre_CNN_type, params=hyperparams,\n local_dataset=dataset_seen, train_ind=train_indices, val_loader=loader_val,\n plot_title=\"\\n\"+timbre_CNN_type.__name__)\n # Save model\n torch.save(model, saved_model_path)\n print(\"Saved trained model to\", saved_model_path)\n # Save loss plot\n loss_plot.savefig(os.path.join(model_dir, timbre_CNN_type.__name__, model_filename+\".svg\"))\n else:\n print(\"\\nLoading pre-trained model from\", saved_model_path)\n model = torch.load(saved_model_path)\n print(model)\n model.count_parameters()\n\n # print(\"\\n\\n-------------Evaluation on the validation set-------------\")\n # scores_seen, per_inst_scores_seen = evaluate_CNN(model, loader_val)\n # print(\"---------Per-instrument scores---------\")\n # print(per_inst_scores_seen)\n # #per_inst_scores_seen.to_csv(os.path.join(result_dir, timbre_CNN_type.__name__, model_filename + \".csv\"))\n # print(\"---Overall validation set performance---\")\n # display_scores(scores_seen, \"Validation set\")\n\n print(\"\\n\\n--------------Evaluation on the unseen set---------------\")\n dataset_unseen = TimbreDataset(data_unseen)\n loader_unseen = DataLoader(dataset_unseen, batch_size=evaluation_bs, shuffle=False, pin_memory=True)\n scores_unseen, per_inst_scores_unseen = evaluate_CNN(model, loader_unseen)\n print(\"---------Per-instrument scores---------\")\n print(per_inst_scores_unseen)\n per_inst_scores_unseen.to_csv(os.path.join(result_dir, timbre_CNN_type.__name__, model_filename + \".csv\"), mode=\"a\")\n print(\"--------Overall unseen set performance--------\")\n display_scores(scores_unseen, \"Unseen test set\\n\"+timbre_CNN_type.__name__)\n\n",
"import csv\nimport warnings\nimport sklearn\nimport pandas as pd\nimport gc\nfrom data_loading import *\nfrom timbre_CNN import *\nfrom evaluation import *\nfrom torch.utils.data import DataLoader, sampler\nfrom melody_loading import *\nresult_dir = 'results'\nmodel_dir = 'models'\nmodel_name = '_retrained'\nval_interval = 5\nperform_hyp_search = False\nperform_cross_val = False\nevaluation_bs = 256\ntimbre_CNN_type = SingleNoteTimbreCNNSmall\nhyperparams_single = {'batch_size': 64, 'epochs': 20, 'learning_rate': \n 0.002, 'loss_function': nn.BCELoss()}\nhyperparams_melody = {'batch_size': 128, 'epochs': 25, 'learning_rate': \n 0.003, 'loss_function': nn.BCELoss()}\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\ndef generate_crossval_fold_indices(data, seed=None, folds=5, verbose=True):\n rng = np.random.default_rng(seed=seed)\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_instruments_fold1 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold2 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold3 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold4 = np.round(len(data.instrument.unique()) / folds)\n indices_fold1 = []\n indices_fold2 = []\n indices_fold3 = []\n indices_fold4 = []\n indices_fold5 = []\n i_grand = 0\n i_upright = 0\n if folds == 5:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3 + num_instruments_fold4:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n else:\n indices_fold5 = np.append(indices_fold5,\n next_instrument_indices).astype(int)\n elif folds == 4:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n else:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n np.random.shuffle(indices_fold1)\n np.random.shuffle(indices_fold2)\n np.random.shuffle(indices_fold3)\n np.random.shuffle(indices_fold4)\n np.random.shuffle(indices_fold5)\n if verbose:\n print(len(indices_fold1), 'samples in fold 1')\n print('\\t', pd.unique(data.iloc[indices_fold1].instrument))\n print(len(indices_fold2), 'samples in fold 2')\n print('\\t', pd.unique(data.iloc[indices_fold2].instrument))\n print(len(indices_fold3), 'samples in fold 3')\n print('\\t', pd.unique(data.iloc[indices_fold3].instrument))\n print(len(indices_fold4), 'samples in fold 4')\n print('\\t', pd.unique(data.iloc[indices_fold4].instrument))\n if folds == 5:\n print(len(indices_fold5), 'samples in fold 5')\n print('\\t', pd.unique(data.iloc[indices_fold5].instrument))\n return (indices_fold1, indices_fold2, indices_fold3, indices_fold4,\n indices_fold5)\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None,\n plot=True, plot_title='', verbose=True):\n if verbose:\n print('\\n--------------TRAINING MODEL--------------')\n print(timbre_CNN_type.__name__, 'with parameters:')\n print(params)\n batch_size = params['batch_size']\n epochs = params['epochs']\n learning_rate = params['learning_rate']\n loss_function = params['loss_function']\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle\n =False, sampler=sampler.SubsetRandomSampler(train_ind), pin_memory=True\n )\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n mean_epoch_loss = (running_loss / (batch_size * (i + 1))).item()\n if verbose:\n print('+Training - Epoch', epoch + 1, 'loss:', mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n if (epoch == epochs - 1 or epoch % val_interval == 0\n ) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item(\n )\n print('\\t+Validation - Epoch', epoch + 1, 'loss:',\n mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch + 1)\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs + 1))\n plt.grid()\n plt.title('Loss curve over ' + str(epochs) +\n ' epochs of training - ' + plot_title)\n plt.tight_layout()\n plt.show()\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\ndef cross_validate(cnn_type, hyparams, cross_val_subset, cv_folds=2,\n partition_mode=None, plot_train_curves=True, verbose=True):\n cv_dataset = TimbreDataset(cross_val_subset)\n total_scores = pd.DataFrame()\n if cv_folds == 2:\n set_1, set_2, _ = generate_split_indices(cross_val_subset,\n partition_ratios=[0.5, 0.5], mode=partition_mode)\n training_sets = [set_1, set_2]\n validation_sets = [set_2, set_1]\n elif cv_folds == 4:\n fold1, fold2, fold3, fold4, _ = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4]), np.\n concatenate([fold3, fold4, fold1]), np.concatenate([fold4,\n fold1, fold2]), np.concatenate([fold1, fold2, fold3])]\n validation_sets = [fold1, fold2, fold3, fold4]\n elif cv_folds == 5:\n fold1, fold2, fold3, fold4, fold5 = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4, fold5]), np.\n concatenate([fold3, fold4, fold5, fold1]), np.concatenate([\n fold4, fold5, fold1, fold2]), np.concatenate([fold5, fold1,\n fold2, fold3]), np.concatenate([fold1, fold2, fold3, fold4])]\n validation_sets = [fold1, fold2, fold3, fold4, fold5]\n else:\n raise Exception('CV mode ' + str(cv_folds) + ' not implemented')\n for fold, (train_fold_indices, val_fold_indices) in enumerate(zip(\n training_sets, validation_sets)):\n print('\\n----------------CV FOLD ' + str(fold + 1) +\n '-----------------')\n val_fold = DataLoader(cv_dataset, batch_size=evaluation_bs, shuffle\n =False, sampler=sampler.SubsetRandomSampler(val_fold_indices),\n pin_memory=True)\n model_fold, _ = train_model(cnn_type=cnn_type, params=hyparams,\n local_dataset=cv_dataset, train_ind=train_fold_indices,\n val_loader=val_fold, plot=plot_train_curves, plot_title=\n 'CV Fold ' + str(fold + 1), verbose=verbose)\n scores_fold, per_inst_scores_fold = evaluate_CNN(model_fold, val_fold)\n if verbose:\n print('\\n------Fold ' + str(fold + 1) +\n ' validation set scores--------')\n print(per_inst_scores_fold)\n display_scores(scores_fold, plot_conf=False)\n numeric_scores_fold = pd.DataFrame.from_dict({k: [v] for k, v in\n scores_fold.items() if k in ['Accuracy', 'F1', 'acc_grand',\n 'acc_upright', 'balanced_acc', 'min_class_acc']})\n numeric_scores_fold['no_samples'] = len(val_fold_indices)\n total_scores = total_scores.append(numeric_scores_fold)\n weighted_mean_acc = (total_scores.Accuracy * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_f1 = (total_scores.F1 * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_acc_grand = (total_scores.acc_grand * total_scores.no_samples\n ).sum() / total_scores.no_samples.sum()\n weighted_mean_acc_upright = (total_scores.acc_upright * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_bal_acc = (total_scores.balanced_acc * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_min_class_acc = (total_scores.min_class_acc *\n total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_std_acc = np.sqrt(np.cov(total_scores.Accuracy, fweights=\n total_scores.no_samples))\n weighted_std_f1 = np.sqrt(np.cov(total_scores.F1, fweights=total_scores\n .no_samples))\n weighted_std_acc_grand = np.sqrt(np.cov(total_scores.acc_grand,\n fweights=total_scores.no_samples))\n weighted_std_acc_upright = np.sqrt(np.cov(total_scores.acc_upright,\n fweights=total_scores.no_samples))\n weighted_std_bal_acc = np.sqrt(np.cov(total_scores.balanced_acc,\n fweights=total_scores.no_samples))\n weighted_std_min_class_acc = np.sqrt(np.cov(total_scores.min_class_acc,\n fweights=total_scores.no_samples))\n cv_scores_stats = pd.DataFrame({'mean': [weighted_mean_acc,\n weighted_mean_f1, weighted_mean_acc_grand,\n weighted_mean_acc_upright, weighted_mean_bal_acc,\n weighted_mean_min_class_acc], 'std': [weighted_std_acc,\n weighted_std_f1, weighted_std_acc_grand, weighted_std_acc_upright,\n weighted_std_bal_acc, weighted_std_min_class_acc]}, index=[\n 'Accuracy', 'F1', 'Grand class accuracy', 'Upright class accuracy',\n 'Balanced (macro-avg) accuracy', 'Min per-class accuracy'])\n return cv_scores_stats\n\n\ndef hyperparameter_search(cnn_type, training_dataset, batch_size_space,\n epochs_space, lr_space, loss_space=None):\n if loss_space is None:\n loss_space = [nn.BCELoss()]\n hyp_search_csv = os.path.join(result_dir, cnn_type.__name__,\n 'hyperparam_search.csv')\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['----------New Hyperparameter search----------'])\n writer.writerow(['Batch size', 'Epochs', 'Learning rate',\n 'Loss function'])\n total_combinations = len(loss_space) * len(lr_space) * len(epochs_space\n ) * len(batch_size_space)\n best_score = 0\n best_params = None\n best_stats = None\n i = 0\n for epochs_local in epochs_space:\n for loss_function_local in loss_space:\n for batch_size_local in batch_size_space:\n for learning_rate_local in lr_space:\n i += 1\n print('\\n------ Hyperparameter search combination', i,\n 'of', total_combinations, '------')\n print('Model type:', cnn_type.__name__)\n hyperparams_local = {'batch_size': batch_size_local,\n 'epochs': epochs_local, 'learning_rate':\n learning_rate_local, 'loss_function':\n loss_function_local}\n print(hyperparams_local)\n cv_results = cross_validate(cnn_type=cnn_type, hyparams\n =hyperparams_local, cross_val_subset=\n training_dataset, cv_folds=4, partition_mode=\n 'segment-instruments-random-balanced',\n plot_train_curves=False, verbose=False)\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([batch_size_local, epochs_local,\n learning_rate_local, loss_function_local])\n cv_results.to_csv(hyp_search_csv, mode='a')\n min_class_acc_local = cv_results.loc[\n 'Min per-class accuracy', 'mean']\n bal_acc_local = cv_results.loc[\n 'Balanced (macro-avg) accuracy', 'mean']\n if (min_class_acc_local > best_score and bal_acc_local >\n 0.5):\n best_params = hyperparams_local\n best_score = min_class_acc_local\n best_stats = cv_results\n print('\\n------New best performing combination------')\n print(best_params)\n print('with stats:')\n print(best_stats.round(3))\n return best_params, best_score, best_stats\n\n\nif __name__ == '__main__':\n device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n print('Device:', device)\n if torch.cuda.is_available():\n print('GPU:', torch.cuda.get_device_name(0))\n print('\\n\\n----------------------LOADING DATA-----------------------')\n if (timbre_CNN_type == SingleNoteTimbreCNN or timbre_CNN_type ==\n SingleNoteTimbreCNNSmall):\n hyperparams = hyperparams_single\n loader = InstrumentLoader(data_dir, note_range=[48, 72],\n set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)\n total_data = loader.preprocess(fmin=20, fmax=20000, n_mels=300,\n normalisation='statistics')\n elif timbre_CNN_type == MelodyTimbreCNN or timbre_CNN_type == MelodyTimbreCNNSmall:\n hyperparams = hyperparams_melody\n loader = MelodyInstrumentLoader(data_dir, note_range=[48, 72],\n set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)\n total_data = loader.preprocess_melodies(midi_dir, normalisation=\n 'statistics')\n else:\n raise Exception(str(timbre_CNN_type) + \" doesn't exist\")\n data_seen = total_data[total_data.dataset == 'MIDIsampled']\n data_unseen = total_data[total_data.dataset != 'MIDIsampled']\n gc.collect()\n if perform_hyp_search:\n print('\\n\\n----------------HYPERPARAMETER SEARCH--------------------')\n batch_size_space = [64, 128, 256]\n epochs_space = [15, 20, 25]\n lr_space = [0.001, 0.002, 0.003]\n best_params, best_score, best_stats = hyperparameter_search(cnn_type\n =timbre_CNN_type, training_dataset=data_seen, batch_size_space=\n batch_size_space, epochs_space=epochs_space, lr_space=lr_space)\n print('\\n---------------Hyperparameter search results---------------')\n print('Model type:', timbre_CNN_type.__name__)\n print('Search space:')\n print('\\tBatch sizes:', batch_size_space)\n print('\\tEpochs:', epochs_space)\n print('\\tLearning rates:', lr_space)\n print('Best params', best_params)\n print('Best score', best_score)\n print('Best stats:')\n print(best_stats)\n if best_params is not None:\n hyperparams = best_params\n dataset_seen = TimbreDataset(data_seen)\n train_indices, val_indices, _ = generate_split_indices(data_seen,\n partition_ratios=[0.8, 0.2], mode='segment-instruments-manual')\n if perform_cross_val:\n print('\\n\\n---------------------CROSS-VALIDATION---------------------')\n cv_results = cross_validate(cnn_type=timbre_CNN_type, hyparams=\n hyperparams, cross_val_subset=data_seen, cv_folds=4,\n partition_mode='segment-instruments-random-balanced')\n print('\\n-------Overall cross-validation scores-------')\n print(cv_results.round(3))\n print('\\n\\n-------------------RE-TRAINED MODEL-----------------------')\n loader_val = DataLoader(dataset_seen, batch_size=evaluation_bs, shuffle\n =False, sampler=sampler.SubsetRandomSampler(val_indices),\n pin_memory=True)\n model_filename = 'model_' + str(hyperparams['batch_size']) + '_' + str(\n hyperparams['epochs']) + '_' + str(hyperparams['learning_rate']\n ) + model_name\n saved_model_path = os.path.join(model_dir, timbre_CNN_type.__name__, \n model_filename + '.pth')\n if not os.path.isfile(saved_model_path):\n print('\\nCreating and training new model')\n model, loss_plot = train_model(cnn_type=timbre_CNN_type, params=\n hyperparams, local_dataset=dataset_seen, train_ind=\n train_indices, val_loader=loader_val, plot_title='\\n' +\n timbre_CNN_type.__name__)\n torch.save(model, saved_model_path)\n print('Saved trained model to', saved_model_path)\n loss_plot.savefig(os.path.join(model_dir, timbre_CNN_type.__name__,\n model_filename + '.svg'))\n else:\n print('\\nLoading pre-trained model from', saved_model_path)\n model = torch.load(saved_model_path)\n print(model)\n model.count_parameters()\n print('\\n\\n--------------Evaluation on the unseen set---------------')\n dataset_unseen = TimbreDataset(data_unseen)\n loader_unseen = DataLoader(dataset_unseen, batch_size=evaluation_bs,\n shuffle=False, pin_memory=True)\n scores_unseen, per_inst_scores_unseen = evaluate_CNN(model, loader_unseen)\n print('---------Per-instrument scores---------')\n print(per_inst_scores_unseen)\n per_inst_scores_unseen.to_csv(os.path.join(result_dir, timbre_CNN_type.\n __name__, model_filename + '.csv'), mode='a')\n print('--------Overall unseen set performance--------')\n display_scores(scores_unseen, 'Unseen test set\\n' + timbre_CNN_type.\n __name__)\n",
"<import token>\nresult_dir = 'results'\nmodel_dir = 'models'\nmodel_name = '_retrained'\nval_interval = 5\nperform_hyp_search = False\nperform_cross_val = False\nevaluation_bs = 256\ntimbre_CNN_type = SingleNoteTimbreCNNSmall\nhyperparams_single = {'batch_size': 64, 'epochs': 20, 'learning_rate': \n 0.002, 'loss_function': nn.BCELoss()}\nhyperparams_melody = {'batch_size': 128, 'epochs': 25, 'learning_rate': \n 0.003, 'loss_function': nn.BCELoss()}\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\ndef generate_crossval_fold_indices(data, seed=None, folds=5, verbose=True):\n rng = np.random.default_rng(seed=seed)\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_instruments_fold1 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold2 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold3 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold4 = np.round(len(data.instrument.unique()) / folds)\n indices_fold1 = []\n indices_fold2 = []\n indices_fold3 = []\n indices_fold4 = []\n indices_fold5 = []\n i_grand = 0\n i_upright = 0\n if folds == 5:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3 + num_instruments_fold4:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n else:\n indices_fold5 = np.append(indices_fold5,\n next_instrument_indices).astype(int)\n elif folds == 4:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n else:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n np.random.shuffle(indices_fold1)\n np.random.shuffle(indices_fold2)\n np.random.shuffle(indices_fold3)\n np.random.shuffle(indices_fold4)\n np.random.shuffle(indices_fold5)\n if verbose:\n print(len(indices_fold1), 'samples in fold 1')\n print('\\t', pd.unique(data.iloc[indices_fold1].instrument))\n print(len(indices_fold2), 'samples in fold 2')\n print('\\t', pd.unique(data.iloc[indices_fold2].instrument))\n print(len(indices_fold3), 'samples in fold 3')\n print('\\t', pd.unique(data.iloc[indices_fold3].instrument))\n print(len(indices_fold4), 'samples in fold 4')\n print('\\t', pd.unique(data.iloc[indices_fold4].instrument))\n if folds == 5:\n print(len(indices_fold5), 'samples in fold 5')\n print('\\t', pd.unique(data.iloc[indices_fold5].instrument))\n return (indices_fold1, indices_fold2, indices_fold3, indices_fold4,\n indices_fold5)\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None,\n plot=True, plot_title='', verbose=True):\n if verbose:\n print('\\n--------------TRAINING MODEL--------------')\n print(timbre_CNN_type.__name__, 'with parameters:')\n print(params)\n batch_size = params['batch_size']\n epochs = params['epochs']\n learning_rate = params['learning_rate']\n loss_function = params['loss_function']\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle\n =False, sampler=sampler.SubsetRandomSampler(train_ind), pin_memory=True\n )\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n mean_epoch_loss = (running_loss / (batch_size * (i + 1))).item()\n if verbose:\n print('+Training - Epoch', epoch + 1, 'loss:', mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n if (epoch == epochs - 1 or epoch % val_interval == 0\n ) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item(\n )\n print('\\t+Validation - Epoch', epoch + 1, 'loss:',\n mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch + 1)\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs + 1))\n plt.grid()\n plt.title('Loss curve over ' + str(epochs) +\n ' epochs of training - ' + plot_title)\n plt.tight_layout()\n plt.show()\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\ndef cross_validate(cnn_type, hyparams, cross_val_subset, cv_folds=2,\n partition_mode=None, plot_train_curves=True, verbose=True):\n cv_dataset = TimbreDataset(cross_val_subset)\n total_scores = pd.DataFrame()\n if cv_folds == 2:\n set_1, set_2, _ = generate_split_indices(cross_val_subset,\n partition_ratios=[0.5, 0.5], mode=partition_mode)\n training_sets = [set_1, set_2]\n validation_sets = [set_2, set_1]\n elif cv_folds == 4:\n fold1, fold2, fold3, fold4, _ = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4]), np.\n concatenate([fold3, fold4, fold1]), np.concatenate([fold4,\n fold1, fold2]), np.concatenate([fold1, fold2, fold3])]\n validation_sets = [fold1, fold2, fold3, fold4]\n elif cv_folds == 5:\n fold1, fold2, fold3, fold4, fold5 = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4, fold5]), np.\n concatenate([fold3, fold4, fold5, fold1]), np.concatenate([\n fold4, fold5, fold1, fold2]), np.concatenate([fold5, fold1,\n fold2, fold3]), np.concatenate([fold1, fold2, fold3, fold4])]\n validation_sets = [fold1, fold2, fold3, fold4, fold5]\n else:\n raise Exception('CV mode ' + str(cv_folds) + ' not implemented')\n for fold, (train_fold_indices, val_fold_indices) in enumerate(zip(\n training_sets, validation_sets)):\n print('\\n----------------CV FOLD ' + str(fold + 1) +\n '-----------------')\n val_fold = DataLoader(cv_dataset, batch_size=evaluation_bs, shuffle\n =False, sampler=sampler.SubsetRandomSampler(val_fold_indices),\n pin_memory=True)\n model_fold, _ = train_model(cnn_type=cnn_type, params=hyparams,\n local_dataset=cv_dataset, train_ind=train_fold_indices,\n val_loader=val_fold, plot=plot_train_curves, plot_title=\n 'CV Fold ' + str(fold + 1), verbose=verbose)\n scores_fold, per_inst_scores_fold = evaluate_CNN(model_fold, val_fold)\n if verbose:\n print('\\n------Fold ' + str(fold + 1) +\n ' validation set scores--------')\n print(per_inst_scores_fold)\n display_scores(scores_fold, plot_conf=False)\n numeric_scores_fold = pd.DataFrame.from_dict({k: [v] for k, v in\n scores_fold.items() if k in ['Accuracy', 'F1', 'acc_grand',\n 'acc_upright', 'balanced_acc', 'min_class_acc']})\n numeric_scores_fold['no_samples'] = len(val_fold_indices)\n total_scores = total_scores.append(numeric_scores_fold)\n weighted_mean_acc = (total_scores.Accuracy * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_f1 = (total_scores.F1 * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_acc_grand = (total_scores.acc_grand * total_scores.no_samples\n ).sum() / total_scores.no_samples.sum()\n weighted_mean_acc_upright = (total_scores.acc_upright * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_bal_acc = (total_scores.balanced_acc * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_min_class_acc = (total_scores.min_class_acc *\n total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_std_acc = np.sqrt(np.cov(total_scores.Accuracy, fweights=\n total_scores.no_samples))\n weighted_std_f1 = np.sqrt(np.cov(total_scores.F1, fweights=total_scores\n .no_samples))\n weighted_std_acc_grand = np.sqrt(np.cov(total_scores.acc_grand,\n fweights=total_scores.no_samples))\n weighted_std_acc_upright = np.sqrt(np.cov(total_scores.acc_upright,\n fweights=total_scores.no_samples))\n weighted_std_bal_acc = np.sqrt(np.cov(total_scores.balanced_acc,\n fweights=total_scores.no_samples))\n weighted_std_min_class_acc = np.sqrt(np.cov(total_scores.min_class_acc,\n fweights=total_scores.no_samples))\n cv_scores_stats = pd.DataFrame({'mean': [weighted_mean_acc,\n weighted_mean_f1, weighted_mean_acc_grand,\n weighted_mean_acc_upright, weighted_mean_bal_acc,\n weighted_mean_min_class_acc], 'std': [weighted_std_acc,\n weighted_std_f1, weighted_std_acc_grand, weighted_std_acc_upright,\n weighted_std_bal_acc, weighted_std_min_class_acc]}, index=[\n 'Accuracy', 'F1', 'Grand class accuracy', 'Upright class accuracy',\n 'Balanced (macro-avg) accuracy', 'Min per-class accuracy'])\n return cv_scores_stats\n\n\ndef hyperparameter_search(cnn_type, training_dataset, batch_size_space,\n epochs_space, lr_space, loss_space=None):\n if loss_space is None:\n loss_space = [nn.BCELoss()]\n hyp_search_csv = os.path.join(result_dir, cnn_type.__name__,\n 'hyperparam_search.csv')\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['----------New Hyperparameter search----------'])\n writer.writerow(['Batch size', 'Epochs', 'Learning rate',\n 'Loss function'])\n total_combinations = len(loss_space) * len(lr_space) * len(epochs_space\n ) * len(batch_size_space)\n best_score = 0\n best_params = None\n best_stats = None\n i = 0\n for epochs_local in epochs_space:\n for loss_function_local in loss_space:\n for batch_size_local in batch_size_space:\n for learning_rate_local in lr_space:\n i += 1\n print('\\n------ Hyperparameter search combination', i,\n 'of', total_combinations, '------')\n print('Model type:', cnn_type.__name__)\n hyperparams_local = {'batch_size': batch_size_local,\n 'epochs': epochs_local, 'learning_rate':\n learning_rate_local, 'loss_function':\n loss_function_local}\n print(hyperparams_local)\n cv_results = cross_validate(cnn_type=cnn_type, hyparams\n =hyperparams_local, cross_val_subset=\n training_dataset, cv_folds=4, partition_mode=\n 'segment-instruments-random-balanced',\n plot_train_curves=False, verbose=False)\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([batch_size_local, epochs_local,\n learning_rate_local, loss_function_local])\n cv_results.to_csv(hyp_search_csv, mode='a')\n min_class_acc_local = cv_results.loc[\n 'Min per-class accuracy', 'mean']\n bal_acc_local = cv_results.loc[\n 'Balanced (macro-avg) accuracy', 'mean']\n if (min_class_acc_local > best_score and bal_acc_local >\n 0.5):\n best_params = hyperparams_local\n best_score = min_class_acc_local\n best_stats = cv_results\n print('\\n------New best performing combination------')\n print(best_params)\n print('with stats:')\n print(best_stats.round(3))\n return best_params, best_score, best_stats\n\n\nif __name__ == '__main__':\n device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n print('Device:', device)\n if torch.cuda.is_available():\n print('GPU:', torch.cuda.get_device_name(0))\n print('\\n\\n----------------------LOADING DATA-----------------------')\n if (timbre_CNN_type == SingleNoteTimbreCNN or timbre_CNN_type ==\n SingleNoteTimbreCNNSmall):\n hyperparams = hyperparams_single\n loader = InstrumentLoader(data_dir, note_range=[48, 72],\n set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)\n total_data = loader.preprocess(fmin=20, fmax=20000, n_mels=300,\n normalisation='statistics')\n elif timbre_CNN_type == MelodyTimbreCNN or timbre_CNN_type == MelodyTimbreCNNSmall:\n hyperparams = hyperparams_melody\n loader = MelodyInstrumentLoader(data_dir, note_range=[48, 72],\n set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)\n total_data = loader.preprocess_melodies(midi_dir, normalisation=\n 'statistics')\n else:\n raise Exception(str(timbre_CNN_type) + \" doesn't exist\")\n data_seen = total_data[total_data.dataset == 'MIDIsampled']\n data_unseen = total_data[total_data.dataset != 'MIDIsampled']\n gc.collect()\n if perform_hyp_search:\n print('\\n\\n----------------HYPERPARAMETER SEARCH--------------------')\n batch_size_space = [64, 128, 256]\n epochs_space = [15, 20, 25]\n lr_space = [0.001, 0.002, 0.003]\n best_params, best_score, best_stats = hyperparameter_search(cnn_type\n =timbre_CNN_type, training_dataset=data_seen, batch_size_space=\n batch_size_space, epochs_space=epochs_space, lr_space=lr_space)\n print('\\n---------------Hyperparameter search results---------------')\n print('Model type:', timbre_CNN_type.__name__)\n print('Search space:')\n print('\\tBatch sizes:', batch_size_space)\n print('\\tEpochs:', epochs_space)\n print('\\tLearning rates:', lr_space)\n print('Best params', best_params)\n print('Best score', best_score)\n print('Best stats:')\n print(best_stats)\n if best_params is not None:\n hyperparams = best_params\n dataset_seen = TimbreDataset(data_seen)\n train_indices, val_indices, _ = generate_split_indices(data_seen,\n partition_ratios=[0.8, 0.2], mode='segment-instruments-manual')\n if perform_cross_val:\n print('\\n\\n---------------------CROSS-VALIDATION---------------------')\n cv_results = cross_validate(cnn_type=timbre_CNN_type, hyparams=\n hyperparams, cross_val_subset=data_seen, cv_folds=4,\n partition_mode='segment-instruments-random-balanced')\n print('\\n-------Overall cross-validation scores-------')\n print(cv_results.round(3))\n print('\\n\\n-------------------RE-TRAINED MODEL-----------------------')\n loader_val = DataLoader(dataset_seen, batch_size=evaluation_bs, shuffle\n =False, sampler=sampler.SubsetRandomSampler(val_indices),\n pin_memory=True)\n model_filename = 'model_' + str(hyperparams['batch_size']) + '_' + str(\n hyperparams['epochs']) + '_' + str(hyperparams['learning_rate']\n ) + model_name\n saved_model_path = os.path.join(model_dir, timbre_CNN_type.__name__, \n model_filename + '.pth')\n if not os.path.isfile(saved_model_path):\n print('\\nCreating and training new model')\n model, loss_plot = train_model(cnn_type=timbre_CNN_type, params=\n hyperparams, local_dataset=dataset_seen, train_ind=\n train_indices, val_loader=loader_val, plot_title='\\n' +\n timbre_CNN_type.__name__)\n torch.save(model, saved_model_path)\n print('Saved trained model to', saved_model_path)\n loss_plot.savefig(os.path.join(model_dir, timbre_CNN_type.__name__,\n model_filename + '.svg'))\n else:\n print('\\nLoading pre-trained model from', saved_model_path)\n model = torch.load(saved_model_path)\n print(model)\n model.count_parameters()\n print('\\n\\n--------------Evaluation on the unseen set---------------')\n dataset_unseen = TimbreDataset(data_unseen)\n loader_unseen = DataLoader(dataset_unseen, batch_size=evaluation_bs,\n shuffle=False, pin_memory=True)\n scores_unseen, per_inst_scores_unseen = evaluate_CNN(model, loader_unseen)\n print('---------Per-instrument scores---------')\n print(per_inst_scores_unseen)\n per_inst_scores_unseen.to_csv(os.path.join(result_dir, timbre_CNN_type.\n __name__, model_filename + '.csv'), mode='a')\n print('--------Overall unseen set performance--------')\n display_scores(scores_unseen, 'Unseen test set\\n' + timbre_CNN_type.\n __name__)\n",
"<import token>\n<assignment token>\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\ndef generate_crossval_fold_indices(data, seed=None, folds=5, verbose=True):\n rng = np.random.default_rng(seed=seed)\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_instruments_fold1 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold2 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold3 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold4 = np.round(len(data.instrument.unique()) / folds)\n indices_fold1 = []\n indices_fold2 = []\n indices_fold3 = []\n indices_fold4 = []\n indices_fold5 = []\n i_grand = 0\n i_upright = 0\n if folds == 5:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3 + num_instruments_fold4:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n else:\n indices_fold5 = np.append(indices_fold5,\n next_instrument_indices).astype(int)\n elif folds == 4:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n else:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n np.random.shuffle(indices_fold1)\n np.random.shuffle(indices_fold2)\n np.random.shuffle(indices_fold3)\n np.random.shuffle(indices_fold4)\n np.random.shuffle(indices_fold5)\n if verbose:\n print(len(indices_fold1), 'samples in fold 1')\n print('\\t', pd.unique(data.iloc[indices_fold1].instrument))\n print(len(indices_fold2), 'samples in fold 2')\n print('\\t', pd.unique(data.iloc[indices_fold2].instrument))\n print(len(indices_fold3), 'samples in fold 3')\n print('\\t', pd.unique(data.iloc[indices_fold3].instrument))\n print(len(indices_fold4), 'samples in fold 4')\n print('\\t', pd.unique(data.iloc[indices_fold4].instrument))\n if folds == 5:\n print(len(indices_fold5), 'samples in fold 5')\n print('\\t', pd.unique(data.iloc[indices_fold5].instrument))\n return (indices_fold1, indices_fold2, indices_fold3, indices_fold4,\n indices_fold5)\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None,\n plot=True, plot_title='', verbose=True):\n if verbose:\n print('\\n--------------TRAINING MODEL--------------')\n print(timbre_CNN_type.__name__, 'with parameters:')\n print(params)\n batch_size = params['batch_size']\n epochs = params['epochs']\n learning_rate = params['learning_rate']\n loss_function = params['loss_function']\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle\n =False, sampler=sampler.SubsetRandomSampler(train_ind), pin_memory=True\n )\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n mean_epoch_loss = (running_loss / (batch_size * (i + 1))).item()\n if verbose:\n print('+Training - Epoch', epoch + 1, 'loss:', mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n if (epoch == epochs - 1 or epoch % val_interval == 0\n ) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item(\n )\n print('\\t+Validation - Epoch', epoch + 1, 'loss:',\n mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch + 1)\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs + 1))\n plt.grid()\n plt.title('Loss curve over ' + str(epochs) +\n ' epochs of training - ' + plot_title)\n plt.tight_layout()\n plt.show()\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\ndef cross_validate(cnn_type, hyparams, cross_val_subset, cv_folds=2,\n partition_mode=None, plot_train_curves=True, verbose=True):\n cv_dataset = TimbreDataset(cross_val_subset)\n total_scores = pd.DataFrame()\n if cv_folds == 2:\n set_1, set_2, _ = generate_split_indices(cross_val_subset,\n partition_ratios=[0.5, 0.5], mode=partition_mode)\n training_sets = [set_1, set_2]\n validation_sets = [set_2, set_1]\n elif cv_folds == 4:\n fold1, fold2, fold3, fold4, _ = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4]), np.\n concatenate([fold3, fold4, fold1]), np.concatenate([fold4,\n fold1, fold2]), np.concatenate([fold1, fold2, fold3])]\n validation_sets = [fold1, fold2, fold3, fold4]\n elif cv_folds == 5:\n fold1, fold2, fold3, fold4, fold5 = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4, fold5]), np.\n concatenate([fold3, fold4, fold5, fold1]), np.concatenate([\n fold4, fold5, fold1, fold2]), np.concatenate([fold5, fold1,\n fold2, fold3]), np.concatenate([fold1, fold2, fold3, fold4])]\n validation_sets = [fold1, fold2, fold3, fold4, fold5]\n else:\n raise Exception('CV mode ' + str(cv_folds) + ' not implemented')\n for fold, (train_fold_indices, val_fold_indices) in enumerate(zip(\n training_sets, validation_sets)):\n print('\\n----------------CV FOLD ' + str(fold + 1) +\n '-----------------')\n val_fold = DataLoader(cv_dataset, batch_size=evaluation_bs, shuffle\n =False, sampler=sampler.SubsetRandomSampler(val_fold_indices),\n pin_memory=True)\n model_fold, _ = train_model(cnn_type=cnn_type, params=hyparams,\n local_dataset=cv_dataset, train_ind=train_fold_indices,\n val_loader=val_fold, plot=plot_train_curves, plot_title=\n 'CV Fold ' + str(fold + 1), verbose=verbose)\n scores_fold, per_inst_scores_fold = evaluate_CNN(model_fold, val_fold)\n if verbose:\n print('\\n------Fold ' + str(fold + 1) +\n ' validation set scores--------')\n print(per_inst_scores_fold)\n display_scores(scores_fold, plot_conf=False)\n numeric_scores_fold = pd.DataFrame.from_dict({k: [v] for k, v in\n scores_fold.items() if k in ['Accuracy', 'F1', 'acc_grand',\n 'acc_upright', 'balanced_acc', 'min_class_acc']})\n numeric_scores_fold['no_samples'] = len(val_fold_indices)\n total_scores = total_scores.append(numeric_scores_fold)\n weighted_mean_acc = (total_scores.Accuracy * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_f1 = (total_scores.F1 * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_acc_grand = (total_scores.acc_grand * total_scores.no_samples\n ).sum() / total_scores.no_samples.sum()\n weighted_mean_acc_upright = (total_scores.acc_upright * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_bal_acc = (total_scores.balanced_acc * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_min_class_acc = (total_scores.min_class_acc *\n total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_std_acc = np.sqrt(np.cov(total_scores.Accuracy, fweights=\n total_scores.no_samples))\n weighted_std_f1 = np.sqrt(np.cov(total_scores.F1, fweights=total_scores\n .no_samples))\n weighted_std_acc_grand = np.sqrt(np.cov(total_scores.acc_grand,\n fweights=total_scores.no_samples))\n weighted_std_acc_upright = np.sqrt(np.cov(total_scores.acc_upright,\n fweights=total_scores.no_samples))\n weighted_std_bal_acc = np.sqrt(np.cov(total_scores.balanced_acc,\n fweights=total_scores.no_samples))\n weighted_std_min_class_acc = np.sqrt(np.cov(total_scores.min_class_acc,\n fweights=total_scores.no_samples))\n cv_scores_stats = pd.DataFrame({'mean': [weighted_mean_acc,\n weighted_mean_f1, weighted_mean_acc_grand,\n weighted_mean_acc_upright, weighted_mean_bal_acc,\n weighted_mean_min_class_acc], 'std': [weighted_std_acc,\n weighted_std_f1, weighted_std_acc_grand, weighted_std_acc_upright,\n weighted_std_bal_acc, weighted_std_min_class_acc]}, index=[\n 'Accuracy', 'F1', 'Grand class accuracy', 'Upright class accuracy',\n 'Balanced (macro-avg) accuracy', 'Min per-class accuracy'])\n return cv_scores_stats\n\n\ndef hyperparameter_search(cnn_type, training_dataset, batch_size_space,\n epochs_space, lr_space, loss_space=None):\n if loss_space is None:\n loss_space = [nn.BCELoss()]\n hyp_search_csv = os.path.join(result_dir, cnn_type.__name__,\n 'hyperparam_search.csv')\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['----------New Hyperparameter search----------'])\n writer.writerow(['Batch size', 'Epochs', 'Learning rate',\n 'Loss function'])\n total_combinations = len(loss_space) * len(lr_space) * len(epochs_space\n ) * len(batch_size_space)\n best_score = 0\n best_params = None\n best_stats = None\n i = 0\n for epochs_local in epochs_space:\n for loss_function_local in loss_space:\n for batch_size_local in batch_size_space:\n for learning_rate_local in lr_space:\n i += 1\n print('\\n------ Hyperparameter search combination', i,\n 'of', total_combinations, '------')\n print('Model type:', cnn_type.__name__)\n hyperparams_local = {'batch_size': batch_size_local,\n 'epochs': epochs_local, 'learning_rate':\n learning_rate_local, 'loss_function':\n loss_function_local}\n print(hyperparams_local)\n cv_results = cross_validate(cnn_type=cnn_type, hyparams\n =hyperparams_local, cross_val_subset=\n training_dataset, cv_folds=4, partition_mode=\n 'segment-instruments-random-balanced',\n plot_train_curves=False, verbose=False)\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([batch_size_local, epochs_local,\n learning_rate_local, loss_function_local])\n cv_results.to_csv(hyp_search_csv, mode='a')\n min_class_acc_local = cv_results.loc[\n 'Min per-class accuracy', 'mean']\n bal_acc_local = cv_results.loc[\n 'Balanced (macro-avg) accuracy', 'mean']\n if (min_class_acc_local > best_score and bal_acc_local >\n 0.5):\n best_params = hyperparams_local\n best_score = min_class_acc_local\n best_stats = cv_results\n print('\\n------New best performing combination------')\n print(best_params)\n print('with stats:')\n print(best_stats.round(3))\n return best_params, best_score, best_stats\n\n\nif __name__ == '__main__':\n device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n print('Device:', device)\n if torch.cuda.is_available():\n print('GPU:', torch.cuda.get_device_name(0))\n print('\\n\\n----------------------LOADING DATA-----------------------')\n if (timbre_CNN_type == SingleNoteTimbreCNN or timbre_CNN_type ==\n SingleNoteTimbreCNNSmall):\n hyperparams = hyperparams_single\n loader = InstrumentLoader(data_dir, note_range=[48, 72],\n set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)\n total_data = loader.preprocess(fmin=20, fmax=20000, n_mels=300,\n normalisation='statistics')\n elif timbre_CNN_type == MelodyTimbreCNN or timbre_CNN_type == MelodyTimbreCNNSmall:\n hyperparams = hyperparams_melody\n loader = MelodyInstrumentLoader(data_dir, note_range=[48, 72],\n set_velocity=None, normalise_wavs=True, load_MIDIsampled=True)\n total_data = loader.preprocess_melodies(midi_dir, normalisation=\n 'statistics')\n else:\n raise Exception(str(timbre_CNN_type) + \" doesn't exist\")\n data_seen = total_data[total_data.dataset == 'MIDIsampled']\n data_unseen = total_data[total_data.dataset != 'MIDIsampled']\n gc.collect()\n if perform_hyp_search:\n print('\\n\\n----------------HYPERPARAMETER SEARCH--------------------')\n batch_size_space = [64, 128, 256]\n epochs_space = [15, 20, 25]\n lr_space = [0.001, 0.002, 0.003]\n best_params, best_score, best_stats = hyperparameter_search(cnn_type\n =timbre_CNN_type, training_dataset=data_seen, batch_size_space=\n batch_size_space, epochs_space=epochs_space, lr_space=lr_space)\n print('\\n---------------Hyperparameter search results---------------')\n print('Model type:', timbre_CNN_type.__name__)\n print('Search space:')\n print('\\tBatch sizes:', batch_size_space)\n print('\\tEpochs:', epochs_space)\n print('\\tLearning rates:', lr_space)\n print('Best params', best_params)\n print('Best score', best_score)\n print('Best stats:')\n print(best_stats)\n if best_params is not None:\n hyperparams = best_params\n dataset_seen = TimbreDataset(data_seen)\n train_indices, val_indices, _ = generate_split_indices(data_seen,\n partition_ratios=[0.8, 0.2], mode='segment-instruments-manual')\n if perform_cross_val:\n print('\\n\\n---------------------CROSS-VALIDATION---------------------')\n cv_results = cross_validate(cnn_type=timbre_CNN_type, hyparams=\n hyperparams, cross_val_subset=data_seen, cv_folds=4,\n partition_mode='segment-instruments-random-balanced')\n print('\\n-------Overall cross-validation scores-------')\n print(cv_results.round(3))\n print('\\n\\n-------------------RE-TRAINED MODEL-----------------------')\n loader_val = DataLoader(dataset_seen, batch_size=evaluation_bs, shuffle\n =False, sampler=sampler.SubsetRandomSampler(val_indices),\n pin_memory=True)\n model_filename = 'model_' + str(hyperparams['batch_size']) + '_' + str(\n hyperparams['epochs']) + '_' + str(hyperparams['learning_rate']\n ) + model_name\n saved_model_path = os.path.join(model_dir, timbre_CNN_type.__name__, \n model_filename + '.pth')\n if not os.path.isfile(saved_model_path):\n print('\\nCreating and training new model')\n model, loss_plot = train_model(cnn_type=timbre_CNN_type, params=\n hyperparams, local_dataset=dataset_seen, train_ind=\n train_indices, val_loader=loader_val, plot_title='\\n' +\n timbre_CNN_type.__name__)\n torch.save(model, saved_model_path)\n print('Saved trained model to', saved_model_path)\n loss_plot.savefig(os.path.join(model_dir, timbre_CNN_type.__name__,\n model_filename + '.svg'))\n else:\n print('\\nLoading pre-trained model from', saved_model_path)\n model = torch.load(saved_model_path)\n print(model)\n model.count_parameters()\n print('\\n\\n--------------Evaluation on the unseen set---------------')\n dataset_unseen = TimbreDataset(data_unseen)\n loader_unseen = DataLoader(dataset_unseen, batch_size=evaluation_bs,\n shuffle=False, pin_memory=True)\n scores_unseen, per_inst_scores_unseen = evaluate_CNN(model, loader_unseen)\n print('---------Per-instrument scores---------')\n print(per_inst_scores_unseen)\n per_inst_scores_unseen.to_csv(os.path.join(result_dir, timbre_CNN_type.\n __name__, model_filename + '.csv'), mode='a')\n print('--------Overall unseen set performance--------')\n display_scores(scores_unseen, 'Unseen test set\\n' + timbre_CNN_type.\n __name__)\n",
"<import token>\n<assignment token>\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\ndef generate_crossval_fold_indices(data, seed=None, folds=5, verbose=True):\n rng = np.random.default_rng(seed=seed)\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_instruments_fold1 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold2 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold3 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold4 = np.round(len(data.instrument.unique()) / folds)\n indices_fold1 = []\n indices_fold2 = []\n indices_fold3 = []\n indices_fold4 = []\n indices_fold5 = []\n i_grand = 0\n i_upright = 0\n if folds == 5:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3 + num_instruments_fold4:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n else:\n indices_fold5 = np.append(indices_fold5,\n next_instrument_indices).astype(int)\n elif folds == 4:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n else:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n np.random.shuffle(indices_fold1)\n np.random.shuffle(indices_fold2)\n np.random.shuffle(indices_fold3)\n np.random.shuffle(indices_fold4)\n np.random.shuffle(indices_fold5)\n if verbose:\n print(len(indices_fold1), 'samples in fold 1')\n print('\\t', pd.unique(data.iloc[indices_fold1].instrument))\n print(len(indices_fold2), 'samples in fold 2')\n print('\\t', pd.unique(data.iloc[indices_fold2].instrument))\n print(len(indices_fold3), 'samples in fold 3')\n print('\\t', pd.unique(data.iloc[indices_fold3].instrument))\n print(len(indices_fold4), 'samples in fold 4')\n print('\\t', pd.unique(data.iloc[indices_fold4].instrument))\n if folds == 5:\n print(len(indices_fold5), 'samples in fold 5')\n print('\\t', pd.unique(data.iloc[indices_fold5].instrument))\n return (indices_fold1, indices_fold2, indices_fold3, indices_fold4,\n indices_fold5)\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None,\n plot=True, plot_title='', verbose=True):\n if verbose:\n print('\\n--------------TRAINING MODEL--------------')\n print(timbre_CNN_type.__name__, 'with parameters:')\n print(params)\n batch_size = params['batch_size']\n epochs = params['epochs']\n learning_rate = params['learning_rate']\n loss_function = params['loss_function']\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle\n =False, sampler=sampler.SubsetRandomSampler(train_ind), pin_memory=True\n )\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n mean_epoch_loss = (running_loss / (batch_size * (i + 1))).item()\n if verbose:\n print('+Training - Epoch', epoch + 1, 'loss:', mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n if (epoch == epochs - 1 or epoch % val_interval == 0\n ) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item(\n )\n print('\\t+Validation - Epoch', epoch + 1, 'loss:',\n mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch + 1)\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs + 1))\n plt.grid()\n plt.title('Loss curve over ' + str(epochs) +\n ' epochs of training - ' + plot_title)\n plt.tight_layout()\n plt.show()\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\ndef cross_validate(cnn_type, hyparams, cross_val_subset, cv_folds=2,\n partition_mode=None, plot_train_curves=True, verbose=True):\n cv_dataset = TimbreDataset(cross_val_subset)\n total_scores = pd.DataFrame()\n if cv_folds == 2:\n set_1, set_2, _ = generate_split_indices(cross_val_subset,\n partition_ratios=[0.5, 0.5], mode=partition_mode)\n training_sets = [set_1, set_2]\n validation_sets = [set_2, set_1]\n elif cv_folds == 4:\n fold1, fold2, fold3, fold4, _ = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4]), np.\n concatenate([fold3, fold4, fold1]), np.concatenate([fold4,\n fold1, fold2]), np.concatenate([fold1, fold2, fold3])]\n validation_sets = [fold1, fold2, fold3, fold4]\n elif cv_folds == 5:\n fold1, fold2, fold3, fold4, fold5 = generate_crossval_fold_indices(\n cross_val_subset, folds=cv_folds, seed=None, verbose=verbose)\n training_sets = [np.concatenate([fold2, fold3, fold4, fold5]), np.\n concatenate([fold3, fold4, fold5, fold1]), np.concatenate([\n fold4, fold5, fold1, fold2]), np.concatenate([fold5, fold1,\n fold2, fold3]), np.concatenate([fold1, fold2, fold3, fold4])]\n validation_sets = [fold1, fold2, fold3, fold4, fold5]\n else:\n raise Exception('CV mode ' + str(cv_folds) + ' not implemented')\n for fold, (train_fold_indices, val_fold_indices) in enumerate(zip(\n training_sets, validation_sets)):\n print('\\n----------------CV FOLD ' + str(fold + 1) +\n '-----------------')\n val_fold = DataLoader(cv_dataset, batch_size=evaluation_bs, shuffle\n =False, sampler=sampler.SubsetRandomSampler(val_fold_indices),\n pin_memory=True)\n model_fold, _ = train_model(cnn_type=cnn_type, params=hyparams,\n local_dataset=cv_dataset, train_ind=train_fold_indices,\n val_loader=val_fold, plot=plot_train_curves, plot_title=\n 'CV Fold ' + str(fold + 1), verbose=verbose)\n scores_fold, per_inst_scores_fold = evaluate_CNN(model_fold, val_fold)\n if verbose:\n print('\\n------Fold ' + str(fold + 1) +\n ' validation set scores--------')\n print(per_inst_scores_fold)\n display_scores(scores_fold, plot_conf=False)\n numeric_scores_fold = pd.DataFrame.from_dict({k: [v] for k, v in\n scores_fold.items() if k in ['Accuracy', 'F1', 'acc_grand',\n 'acc_upright', 'balanced_acc', 'min_class_acc']})\n numeric_scores_fold['no_samples'] = len(val_fold_indices)\n total_scores = total_scores.append(numeric_scores_fold)\n weighted_mean_acc = (total_scores.Accuracy * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_f1 = (total_scores.F1 * total_scores.no_samples).sum(\n ) / total_scores.no_samples.sum()\n weighted_mean_acc_grand = (total_scores.acc_grand * total_scores.no_samples\n ).sum() / total_scores.no_samples.sum()\n weighted_mean_acc_upright = (total_scores.acc_upright * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_bal_acc = (total_scores.balanced_acc * total_scores.\n no_samples).sum() / total_scores.no_samples.sum()\n weighted_mean_min_class_acc = (total_scores.min_class_acc *\n total_scores.no_samples).sum() / total_scores.no_samples.sum()\n weighted_std_acc = np.sqrt(np.cov(total_scores.Accuracy, fweights=\n total_scores.no_samples))\n weighted_std_f1 = np.sqrt(np.cov(total_scores.F1, fweights=total_scores\n .no_samples))\n weighted_std_acc_grand = np.sqrt(np.cov(total_scores.acc_grand,\n fweights=total_scores.no_samples))\n weighted_std_acc_upright = np.sqrt(np.cov(total_scores.acc_upright,\n fweights=total_scores.no_samples))\n weighted_std_bal_acc = np.sqrt(np.cov(total_scores.balanced_acc,\n fweights=total_scores.no_samples))\n weighted_std_min_class_acc = np.sqrt(np.cov(total_scores.min_class_acc,\n fweights=total_scores.no_samples))\n cv_scores_stats = pd.DataFrame({'mean': [weighted_mean_acc,\n weighted_mean_f1, weighted_mean_acc_grand,\n weighted_mean_acc_upright, weighted_mean_bal_acc,\n weighted_mean_min_class_acc], 'std': [weighted_std_acc,\n weighted_std_f1, weighted_std_acc_grand, weighted_std_acc_upright,\n weighted_std_bal_acc, weighted_std_min_class_acc]}, index=[\n 'Accuracy', 'F1', 'Grand class accuracy', 'Upright class accuracy',\n 'Balanced (macro-avg) accuracy', 'Min per-class accuracy'])\n return cv_scores_stats\n\n\ndef hyperparameter_search(cnn_type, training_dataset, batch_size_space,\n epochs_space, lr_space, loss_space=None):\n if loss_space is None:\n loss_space = [nn.BCELoss()]\n hyp_search_csv = os.path.join(result_dir, cnn_type.__name__,\n 'hyperparam_search.csv')\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['----------New Hyperparameter search----------'])\n writer.writerow(['Batch size', 'Epochs', 'Learning rate',\n 'Loss function'])\n total_combinations = len(loss_space) * len(lr_space) * len(epochs_space\n ) * len(batch_size_space)\n best_score = 0\n best_params = None\n best_stats = None\n i = 0\n for epochs_local in epochs_space:\n for loss_function_local in loss_space:\n for batch_size_local in batch_size_space:\n for learning_rate_local in lr_space:\n i += 1\n print('\\n------ Hyperparameter search combination', i,\n 'of', total_combinations, '------')\n print('Model type:', cnn_type.__name__)\n hyperparams_local = {'batch_size': batch_size_local,\n 'epochs': epochs_local, 'learning_rate':\n learning_rate_local, 'loss_function':\n loss_function_local}\n print(hyperparams_local)\n cv_results = cross_validate(cnn_type=cnn_type, hyparams\n =hyperparams_local, cross_val_subset=\n training_dataset, cv_folds=4, partition_mode=\n 'segment-instruments-random-balanced',\n plot_train_curves=False, verbose=False)\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([batch_size_local, epochs_local,\n learning_rate_local, loss_function_local])\n cv_results.to_csv(hyp_search_csv, mode='a')\n min_class_acc_local = cv_results.loc[\n 'Min per-class accuracy', 'mean']\n bal_acc_local = cv_results.loc[\n 'Balanced (macro-avg) accuracy', 'mean']\n if (min_class_acc_local > best_score and bal_acc_local >\n 0.5):\n best_params = hyperparams_local\n best_score = min_class_acc_local\n best_stats = cv_results\n print('\\n------New best performing combination------')\n print(best_params)\n print('with stats:')\n print(best_stats.round(3))\n return best_params, best_score, best_stats\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\ndef generate_crossval_fold_indices(data, seed=None, folds=5, verbose=True):\n rng = np.random.default_rng(seed=seed)\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_instruments_fold1 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold2 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold3 = np.round(len(data.instrument.unique()) / folds)\n num_instruments_fold4 = np.round(len(data.instrument.unique()) / folds)\n indices_fold1 = []\n indices_fold2 = []\n indices_fold3 = []\n indices_fold4 = []\n indices_fold5 = []\n i_grand = 0\n i_upright = 0\n if folds == 5:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3 + num_instruments_fold4:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n else:\n indices_fold5 = np.append(indices_fold5,\n next_instrument_indices).astype(int)\n elif folds == 4:\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_instruments_fold1:\n indices_fold1 = np.append(indices_fold1,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2:\n indices_fold2 = np.append(indices_fold2,\n next_instrument_indices).astype(int)\n elif i < num_instruments_fold1 + num_instruments_fold2 + num_instruments_fold3:\n indices_fold3 = np.append(indices_fold3,\n next_instrument_indices).astype(int)\n else:\n indices_fold4 = np.append(indices_fold4,\n next_instrument_indices).astype(int)\n np.random.shuffle(indices_fold1)\n np.random.shuffle(indices_fold2)\n np.random.shuffle(indices_fold3)\n np.random.shuffle(indices_fold4)\n np.random.shuffle(indices_fold5)\n if verbose:\n print(len(indices_fold1), 'samples in fold 1')\n print('\\t', pd.unique(data.iloc[indices_fold1].instrument))\n print(len(indices_fold2), 'samples in fold 2')\n print('\\t', pd.unique(data.iloc[indices_fold2].instrument))\n print(len(indices_fold3), 'samples in fold 3')\n print('\\t', pd.unique(data.iloc[indices_fold3].instrument))\n print(len(indices_fold4), 'samples in fold 4')\n print('\\t', pd.unique(data.iloc[indices_fold4].instrument))\n if folds == 5:\n print(len(indices_fold5), 'samples in fold 5')\n print('\\t', pd.unique(data.iloc[indices_fold5].instrument))\n return (indices_fold1, indices_fold2, indices_fold3, indices_fold4,\n indices_fold5)\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None,\n plot=True, plot_title='', verbose=True):\n if verbose:\n print('\\n--------------TRAINING MODEL--------------')\n print(timbre_CNN_type.__name__, 'with parameters:')\n print(params)\n batch_size = params['batch_size']\n epochs = params['epochs']\n learning_rate = params['learning_rate']\n loss_function = params['loss_function']\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle\n =False, sampler=sampler.SubsetRandomSampler(train_ind), pin_memory=True\n )\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n mean_epoch_loss = (running_loss / (batch_size * (i + 1))).item()\n if verbose:\n print('+Training - Epoch', epoch + 1, 'loss:', mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n if (epoch == epochs - 1 or epoch % val_interval == 0\n ) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item(\n )\n print('\\t+Validation - Epoch', epoch + 1, 'loss:',\n mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch + 1)\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs + 1))\n plt.grid()\n plt.title('Loss curve over ' + str(epochs) +\n ' epochs of training - ' + plot_title)\n plt.tight_layout()\n plt.show()\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\n<function token>\n\n\ndef hyperparameter_search(cnn_type, training_dataset, batch_size_space,\n epochs_space, lr_space, loss_space=None):\n if loss_space is None:\n loss_space = [nn.BCELoss()]\n hyp_search_csv = os.path.join(result_dir, cnn_type.__name__,\n 'hyperparam_search.csv')\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['----------New Hyperparameter search----------'])\n writer.writerow(['Batch size', 'Epochs', 'Learning rate',\n 'Loss function'])\n total_combinations = len(loss_space) * len(lr_space) * len(epochs_space\n ) * len(batch_size_space)\n best_score = 0\n best_params = None\n best_stats = None\n i = 0\n for epochs_local in epochs_space:\n for loss_function_local in loss_space:\n for batch_size_local in batch_size_space:\n for learning_rate_local in lr_space:\n i += 1\n print('\\n------ Hyperparameter search combination', i,\n 'of', total_combinations, '------')\n print('Model type:', cnn_type.__name__)\n hyperparams_local = {'batch_size': batch_size_local,\n 'epochs': epochs_local, 'learning_rate':\n learning_rate_local, 'loss_function':\n loss_function_local}\n print(hyperparams_local)\n cv_results = cross_validate(cnn_type=cnn_type, hyparams\n =hyperparams_local, cross_val_subset=\n training_dataset, cv_folds=4, partition_mode=\n 'segment-instruments-random-balanced',\n plot_train_curves=False, verbose=False)\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([batch_size_local, epochs_local,\n learning_rate_local, loss_function_local])\n cv_results.to_csv(hyp_search_csv, mode='a')\n min_class_acc_local = cv_results.loc[\n 'Min per-class accuracy', 'mean']\n bal_acc_local = cv_results.loc[\n 'Balanced (macro-avg) accuracy', 'mean']\n if (min_class_acc_local > best_score and bal_acc_local >\n 0.5):\n best_params = hyperparams_local\n best_score = min_class_acc_local\n best_stats = cv_results\n print('\\n------New best performing combination------')\n print(best_params)\n print('with stats:')\n print(best_stats.round(3))\n return best_params, best_score, best_stats\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\n<function token>\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None,\n plot=True, plot_title='', verbose=True):\n if verbose:\n print('\\n--------------TRAINING MODEL--------------')\n print(timbre_CNN_type.__name__, 'with parameters:')\n print(params)\n batch_size = params['batch_size']\n epochs = params['epochs']\n learning_rate = params['learning_rate']\n loss_function = params['loss_function']\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle\n =False, sampler=sampler.SubsetRandomSampler(train_ind), pin_memory=True\n )\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n mean_epoch_loss = (running_loss / (batch_size * (i + 1))).item()\n if verbose:\n print('+Training - Epoch', epoch + 1, 'loss:', mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n if (epoch == epochs - 1 or epoch % val_interval == 0\n ) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item(\n )\n print('\\t+Validation - Epoch', epoch + 1, 'loss:',\n mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch + 1)\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs + 1))\n plt.grid()\n plt.title('Loss curve over ' + str(epochs) +\n ' epochs of training - ' + plot_title)\n plt.tight_layout()\n plt.show()\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\n<function token>\n\n\ndef hyperparameter_search(cnn_type, training_dataset, batch_size_space,\n epochs_space, lr_space, loss_space=None):\n if loss_space is None:\n loss_space = [nn.BCELoss()]\n hyp_search_csv = os.path.join(result_dir, cnn_type.__name__,\n 'hyperparam_search.csv')\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow(['----------New Hyperparameter search----------'])\n writer.writerow(['Batch size', 'Epochs', 'Learning rate',\n 'Loss function'])\n total_combinations = len(loss_space) * len(lr_space) * len(epochs_space\n ) * len(batch_size_space)\n best_score = 0\n best_params = None\n best_stats = None\n i = 0\n for epochs_local in epochs_space:\n for loss_function_local in loss_space:\n for batch_size_local in batch_size_space:\n for learning_rate_local in lr_space:\n i += 1\n print('\\n------ Hyperparameter search combination', i,\n 'of', total_combinations, '------')\n print('Model type:', cnn_type.__name__)\n hyperparams_local = {'batch_size': batch_size_local,\n 'epochs': epochs_local, 'learning_rate':\n learning_rate_local, 'loss_function':\n loss_function_local}\n print(hyperparams_local)\n cv_results = cross_validate(cnn_type=cnn_type, hyparams\n =hyperparams_local, cross_val_subset=\n training_dataset, cv_folds=4, partition_mode=\n 'segment-instruments-random-balanced',\n plot_train_curves=False, verbose=False)\n with open(hyp_search_csv, 'a', newline='') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([batch_size_local, epochs_local,\n learning_rate_local, loss_function_local])\n cv_results.to_csv(hyp_search_csv, mode='a')\n min_class_acc_local = cv_results.loc[\n 'Min per-class accuracy', 'mean']\n bal_acc_local = cv_results.loc[\n 'Balanced (macro-avg) accuracy', 'mean']\n if (min_class_acc_local > best_score and bal_acc_local >\n 0.5):\n best_params = hyperparams_local\n best_score = min_class_acc_local\n best_stats = cv_results\n print('\\n------New best performing combination------')\n print(best_params)\n print('with stats:')\n print(best_stats.round(3))\n return best_params, best_score, best_stats\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\n<function token>\n\n\ndef train_model(cnn_type, params, local_dataset, train_ind, val_loader=None,\n plot=True, plot_title='', verbose=True):\n if verbose:\n print('\\n--------------TRAINING MODEL--------------')\n print(timbre_CNN_type.__name__, 'with parameters:')\n print(params)\n batch_size = params['batch_size']\n epochs = params['epochs']\n learning_rate = params['learning_rate']\n loss_function = params['loss_function']\n loader_train = DataLoader(local_dataset, batch_size=batch_size, shuffle\n =False, sampler=sampler.SubsetRandomSampler(train_ind), pin_memory=True\n )\n model = cnn_type().to(device, non_blocking=True)\n optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)\n with torch.enable_grad():\n loss_train_log = []\n loss_val_log = []\n epoch_val_log = []\n for epoch in range(epochs):\n model.train()\n running_loss = 0.0\n for i, batch in enumerate(loader_train):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n optimizer.zero_grad()\n y = model(x)\n loss = loss_function(y, label)\n loss.backward()\n optimizer.step()\n running_loss += loss.detach()\n gc.collect()\n mean_epoch_loss = (running_loss / (batch_size * (i + 1))).item()\n if verbose:\n print('+Training - Epoch', epoch + 1, 'loss:', mean_epoch_loss)\n loss_train_log.append(mean_epoch_loss)\n if (epoch == epochs - 1 or epoch % val_interval == 0\n ) and val_loader is not None and plot:\n loss_val = 0\n model.eval()\n with torch.no_grad():\n for i, batch in enumerate(val_loader):\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = model(x)\n loss_val += loss_function(y, label).detach()\n gc.collect()\n mean_epoch_val_loss = (loss_val / (batch_size * (i + 1))).item(\n )\n print('\\t+Validation - Epoch', epoch + 1, 'loss:',\n mean_epoch_val_loss)\n loss_val_log.append(mean_epoch_val_loss)\n epoch_val_log.append(epoch + 1)\n fig = None\n if plot:\n fig = plt.figure()\n plt.plot(range(1, epochs + 1), loss_train_log, c='r', label='train')\n if val_loader is not None:\n plt.plot(epoch_val_log, loss_val_log, c='b', label='val')\n plt.legend()\n plt.xlabel('epoch')\n plt.ylabel('loss')\n plt.xticks(np.arange(1, epochs + 1))\n plt.grid()\n plt.title('Loss curve over ' + str(epochs) +\n ' epochs of training - ' + plot_title)\n plt.tight_layout()\n plt.show()\n return model, fig\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\ndef generate_split_indices(data, partition_ratios=None, mode='mixed', seed=None\n ):\n if partition_ratios is None:\n partition_ratios = [0.8, 0.1]\n rng = np.random.default_rng(seed=seed)\n if mode == 'segment-instruments-random':\n instruments = data.instrument.unique()\n rng.shuffle(instruments)\n i = 0\n indices_train = []\n indices_val = []\n indices_test = []\n no_more_instruments = False\n next_instrument_indices = np.asarray(data.instrument == instruments[i]\n ).nonzero()[0]\n while (len(indices_train) + len(next_instrument_indices)) / len(data\n ) <= partition_ratios[0]:\n indices_train = np.append(indices_train, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n no_more_instruments = True\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n while (len(indices_train) + len(indices_val) + len(\n next_instrument_indices)) / len(data) <= partition_ratios[0\n ] + partition_ratios[1] and not no_more_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n i += 1\n if i >= len(instruments):\n break\n next_instrument_indices = np.asarray(data.instrument ==\n instruments[i]).nonzero()[0]\n for j in range(i, len(instruments)):\n indices_test = np.append(indices_test, np.asarray(data.\n instrument == instruments[j]).nonzero()[0])\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-random-balanced':\n instruments_grand = data[data.label == 0].instrument.unique()\n instruments_upright = data[data.label == 1].instrument.unique()\n rng.shuffle(instruments_grand)\n rng.shuffle(instruments_upright)\n num_train_instruments = np.round(partition_ratios[0] * len(data.\n instrument.unique()))\n num_val_instruments = np.round(partition_ratios[1] * len(data.\n instrument.unique()))\n indices_train = []\n indices_val = []\n indices_test = []\n i_grand = 0\n i_upright = 0\n for i in range(0, len(data.instrument.unique())):\n if i % 2 and i_upright < len(instruments_upright):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_upright[i_upright]).nonzero()[0]\n i_upright += 1\n elif i_grand < len(instruments_grand):\n next_instrument_indices = np.asarray(data.instrument ==\n instruments_grand[i_grand]).nonzero()[0]\n i_grand += 1\n else:\n break\n if i < num_train_instruments:\n indices_train = np.append(indices_train,\n next_instrument_indices)\n elif i < num_train_instruments + num_val_instruments:\n indices_val = np.append(indices_val, next_instrument_indices)\n else:\n indices_test = np.append(indices_test, next_instrument_indices)\n if np.sum(partition_ratios) == 1:\n indices_val = np.append(indices_val, indices_test)\n indices_test = []\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-instruments-manual':\n train_instruments = ['Nord_BrightGrand-XL', 'Nord_AmberUpright-XL',\n 'Nord_ConcertGrand1-Lrg', 'Nord_BabyUpright-XL',\n 'Nord_GrandImperial-XL', 'Nord_BlackUpright-Lrg',\n 'Nord_RoyalGrand3D-XL', 'Nord_MellowUpright-XL',\n 'Nord_StudioGrand1-Lrg', 'Nord_RainPiano-Lrg',\n 'Nord_WhiteGrand-XL', 'Nord_RomanticUpright-Lrg',\n 'Nord_VelvetGrand-XL', 'Nord_GrandUpright-XL',\n 'Nord_StudioGrand2-Lrg', 'Nord_SaloonUpright-Lrg',\n 'Nord_ItalianGrand-XL', 'Nord_BlueSwede-Lrg']\n val_instruments = ['Nord_ConcertGrand1Amb-Lrg',\n 'Nord_BambinoUpright-XL', 'Nord_GrandLadyD-Lrg',\n 'Nord_QueenUpright-Lrg', 'Nord_SilverGrand-XL']\n test_instruments = []\n indices_train = np.asarray(data.instrument.isin(train_instruments)\n ).nonzero()[0]\n indices_val = np.asarray(data.instrument.isin(val_instruments)\n ).nonzero()[0]\n indices_test = np.asarray(data.instrument.isin(test_instruments)\n ).nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'segment-velocities':\n indices_train = np.asarray(data.velocity == 'M').nonzero()[0]\n indices_val = np.asarray(data.velocity == 'P').nonzero()[0]\n indices_test = np.asarray(data.velocity == 'F').nonzero()[0]\n np.random.shuffle(indices_train)\n np.random.shuffle(indices_val)\n np.random.shuffle(indices_test)\n elif mode == 'mixed':\n indices = np.arange(len(data))\n rng.shuffle(indices)\n split_point_train = int(len(data) * partition_ratios[0])\n split_point_val = split_point_train + int(len(data) *\n partition_ratios[1])\n indices_train = indices[:split_point_train]\n indices_val = indices[split_point_train:split_point_val]\n indices_test = indices[split_point_val:]\n else:\n raise Exception('Mode not recognised')\n print('')\n indices_train = indices_train.astype(int)\n indices_val = indices_val.astype(int)\n print(len(indices_train), 'training samples')\n print(len(indices_val), 'validation samples')\n print(len(indices_test), 'test samples')\n train_class_balance = data.iloc[indices_train].label.sum(axis=0) / len(\n indices_train)\n print('Train set contains', np.round(train_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_train].instrument))\n val_class_balance = data.iloc[indices_val].label.sum(axis=0) / len(\n indices_val)\n print('Validation set contains', np.round(val_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_val].instrument))\n if len(indices_test) == 0:\n indices_test = np.array([])\n indices_test = indices_test.astype(int)\n else:\n indices_test = indices_test.astype(int)\n test_class_balance = data.iloc[indices_test].label.sum(axis=0) / len(\n indices_test)\n print('Test set contains', np.round(test_class_balance * 100),\n '% Upright pianos')\n if mode == 'segment_instruments':\n print('\\t', pd.unique(data.iloc[indices_test].instrument))\n print('Overall, dataset contains', np.round(100 * data.label.sum(axis=0\n ) / len(data)), '% Upright pianos')\n return indices_train, indices_val, indices_test\n\n\n<function token>\n<function token>\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n\n\ndef evaluate_CNN(evaluated_model, test_set):\n labels_total = np.empty(0, dtype=int)\n preds_total = np.empty(0, dtype=int)\n instruments_acc = np.empty(0, dtype=str)\n evaluated_model.eval()\n with torch.no_grad():\n evaluated_model = evaluated_model.to(device, non_blocking=True)\n for batch in test_set:\n x = batch[0].float().to(device, non_blocking=True)\n label = batch[1].float().to(device, non_blocking=True)\n y = evaluated_model(x)\n pred = torch.round(y)\n labels_total = np.append(labels_total, label.cpu())\n preds_total = np.append(preds_total, pred.cpu())\n instruments_acc = np.append(instruments_acc, np.array(batch[2]))\n per_inst_scores = pd.DataFrame()\n for instrument in np.unique(instruments_acc):\n instrument_mask = np.nonzero(instruments_acc == instrument)\n instrument_scores = evaluate_scores(labels_total[instrument_mask],\n preds_total[instrument_mask])\n piano_class = 'Upright' if labels_total[instrument_mask][0\n ] else 'Grand'\n per_inst_scores = per_inst_scores.append(pd.DataFrame([[np.round(\n instrument_scores['Accuracy'], 2), piano_class]], index=pd.\n Index([instrument], name='Instrument'), columns=['Accuracy',\n 'Class']))\n overall_scores = evaluate_scores(labels_total, preds_total)\n return overall_scores, per_inst_scores\n\n\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
99,026 |
cec2414b851fea9e499387a3e46b0fba090c5db5
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-05-16 10:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0016_category_slug'),
]
operations = [
migrations.RenameField(
model_name='comment',
old_name='text',
new_name='comment',
),
migrations.RenameField(
model_name='comment',
old_name='author',
new_name='name',
),
migrations.AlterField(
model_name='category',
name='name',
field=models.CharField(blank=True, default='', max_length=100, null=True),
),
]
|
[
"# -*- coding: utf-8 -*-\n# Generated by Django 1.9 on 2016-05-16 10:44\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0016_category_slug'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='comment',\n old_name='text',\n new_name='comment',\n ),\n migrations.RenameField(\n model_name='comment',\n old_name='author',\n new_name='name',\n ),\n migrations.AlterField(\n model_name='category',\n name='name',\n field=models.CharField(blank=True, default='', max_length=100, null=True),\n ),\n ]\n",
"from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('blog', '0016_category_slug')]\n operations = [migrations.RenameField(model_name='comment', old_name=\n 'text', new_name='comment'), migrations.RenameField(model_name=\n 'comment', old_name='author', new_name='name'), migrations.\n AlterField(model_name='category', name='name', field=models.\n CharField(blank=True, default='', max_length=100, null=True))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('blog', '0016_category_slug')]\n operations = [migrations.RenameField(model_name='comment', old_name=\n 'text', new_name='comment'), migrations.RenameField(model_name=\n 'comment', old_name='author', new_name='name'), migrations.\n AlterField(model_name='category', name='name', field=models.\n CharField(blank=True, default='', max_length=100, null=True))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
99,027 |
4e1354f476eefa4b19cff5b3f0ba57f7f03081ca
|
import bge
from command import Command
from gun import Gun
class AimCommand(Command):
#gun = Gun()
def __init__(self,gun):
self.gun = gun
def execute(self,vec):
self.gun.aim(vec)
|
[
"import bge\nfrom command import Command\nfrom gun import Gun\n\nclass AimCommand(Command):\n #gun = Gun() \n def __init__(self,gun):\n self.gun = gun\n \n def execute(self,vec):\n self.gun.aim(vec)",
"import bge\nfrom command import Command\nfrom gun import Gun\n\n\nclass AimCommand(Command):\n\n def __init__(self, gun):\n self.gun = gun\n\n def execute(self, vec):\n self.gun.aim(vec)\n",
"<import token>\n\n\nclass AimCommand(Command):\n\n def __init__(self, gun):\n self.gun = gun\n\n def execute(self, vec):\n self.gun.aim(vec)\n",
"<import token>\n\n\nclass AimCommand(Command):\n <function token>\n\n def execute(self, vec):\n self.gun.aim(vec)\n",
"<import token>\n\n\nclass AimCommand(Command):\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,028 |
46c9f8733837a326ca170386de9ebbcbf0cce3eb
|
import redis
from model import Site, db
import urllib
import requests
# ------------------------------------------------- #
def process_job():
"""processes url web scraping request
If process has been done before, get result from database
Otherwise, process and add data to database"""
r = redis.StrictRedis()
while True:
curr_job = r.blpop('job_queue', 0)[1]
r.hset('status', curr_job, 'processing')
print('current job ID:', curr_job)
# convert byte to string
url = r.hget('urls', curr_job).decode("utf-8")
print('Current URL:', url)
# if this url has not been requested before/is not in the db
if Site.query.filter_by(url=url).first():
r.hset('status', curr_job, 'complete')
print('Job', curr_job, 'Completed')
else:
# fetches url page source
try:
html = str(get_html(url))
print('Successfully retrieved HTML')
# add results to database
db.session.add(Site(url=url, html=html))
db.session.commit()
print('Added to database')
r.hset('status', curr_job, 'complete')
print('Job', curr_job, 'Completed')
except ValueError:
r.hset('status', curr_job, 'abort')
print('Job', curr_job, 'Aborted')
except TimeoutError:
r.hset('status', curr_job, 'timeout')
print('Job', curr_job, 'Timed Out')
return
def get_html(url):
"""Fetches html page source of url"""
print('fetching', url)
try:
re = requests.get(url, timeout=1, stream=True)
print('success!')
# limit file size to 1mb
html = re.raw.read(1000000+1, decode_content=True)
if len(html) > 1000000:
raise ValueError('response too large')
return html
except:
raise TimeoutError('request timed out')
|
[
"import redis\nfrom model import Site, db\nimport urllib\nimport requests\n\n# ------------------------------------------------- #\n\ndef process_job():\n \"\"\"processes url web scraping request\n If process has been done before, get result from database\n Otherwise, process and add data to database\"\"\"\n r = redis.StrictRedis()\n while True:\n curr_job = r.blpop('job_queue', 0)[1]\n r.hset('status', curr_job, 'processing')\n print('current job ID:', curr_job)\n # convert byte to string\n url = r.hget('urls', curr_job).decode(\"utf-8\")\n print('Current URL:', url)\n\n # if this url has not been requested before/is not in the db\n if Site.query.filter_by(url=url).first():\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n else:\n # fetches url page source\n try:\n html = str(get_html(url))\n print('Successfully retrieved HTML')\n # add results to database\n db.session.add(Site(url=url, html=html))\n db.session.commit()\n print('Added to database')\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n except ValueError:\n r.hset('status', curr_job, 'abort')\n print('Job', curr_job, 'Aborted')\n except TimeoutError:\n r.hset('status', curr_job, 'timeout')\n print('Job', curr_job, 'Timed Out')\n return\n\ndef get_html(url):\n \"\"\"Fetches html page source of url\"\"\"\n print('fetching', url)\n try:\n re = requests.get(url, timeout=1, stream=True)\n print('success!')\n # limit file size to 1mb\n html = re.raw.read(1000000+1, decode_content=True)\n if len(html) > 1000000:\n raise ValueError('response too large')\n return html\n except:\n raise TimeoutError('request timed out')\n ",
"import redis\nfrom model import Site, db\nimport urllib\nimport requests\n\n\ndef process_job():\n \"\"\"processes url web scraping request\n If process has been done before, get result from database\n Otherwise, process and add data to database\"\"\"\n r = redis.StrictRedis()\n while True:\n curr_job = r.blpop('job_queue', 0)[1]\n r.hset('status', curr_job, 'processing')\n print('current job ID:', curr_job)\n url = r.hget('urls', curr_job).decode('utf-8')\n print('Current URL:', url)\n if Site.query.filter_by(url=url).first():\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n else:\n try:\n html = str(get_html(url))\n print('Successfully retrieved HTML')\n db.session.add(Site(url=url, html=html))\n db.session.commit()\n print('Added to database')\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n except ValueError:\n r.hset('status', curr_job, 'abort')\n print('Job', curr_job, 'Aborted')\n except TimeoutError:\n r.hset('status', curr_job, 'timeout')\n print('Job', curr_job, 'Timed Out')\n return\n\n\ndef get_html(url):\n \"\"\"Fetches html page source of url\"\"\"\n print('fetching', url)\n try:\n re = requests.get(url, timeout=1, stream=True)\n print('success!')\n html = re.raw.read(1000000 + 1, decode_content=True)\n if len(html) > 1000000:\n raise ValueError('response too large')\n return html\n except:\n raise TimeoutError('request timed out')\n",
"<import token>\n\n\ndef process_job():\n \"\"\"processes url web scraping request\n If process has been done before, get result from database\n Otherwise, process and add data to database\"\"\"\n r = redis.StrictRedis()\n while True:\n curr_job = r.blpop('job_queue', 0)[1]\n r.hset('status', curr_job, 'processing')\n print('current job ID:', curr_job)\n url = r.hget('urls', curr_job).decode('utf-8')\n print('Current URL:', url)\n if Site.query.filter_by(url=url).first():\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n else:\n try:\n html = str(get_html(url))\n print('Successfully retrieved HTML')\n db.session.add(Site(url=url, html=html))\n db.session.commit()\n print('Added to database')\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n except ValueError:\n r.hset('status', curr_job, 'abort')\n print('Job', curr_job, 'Aborted')\n except TimeoutError:\n r.hset('status', curr_job, 'timeout')\n print('Job', curr_job, 'Timed Out')\n return\n\n\ndef get_html(url):\n \"\"\"Fetches html page source of url\"\"\"\n print('fetching', url)\n try:\n re = requests.get(url, timeout=1, stream=True)\n print('success!')\n html = re.raw.read(1000000 + 1, decode_content=True)\n if len(html) > 1000000:\n raise ValueError('response too large')\n return html\n except:\n raise TimeoutError('request timed out')\n",
"<import token>\n\n\ndef process_job():\n \"\"\"processes url web scraping request\n If process has been done before, get result from database\n Otherwise, process and add data to database\"\"\"\n r = redis.StrictRedis()\n while True:\n curr_job = r.blpop('job_queue', 0)[1]\n r.hset('status', curr_job, 'processing')\n print('current job ID:', curr_job)\n url = r.hget('urls', curr_job).decode('utf-8')\n print('Current URL:', url)\n if Site.query.filter_by(url=url).first():\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n else:\n try:\n html = str(get_html(url))\n print('Successfully retrieved HTML')\n db.session.add(Site(url=url, html=html))\n db.session.commit()\n print('Added to database')\n r.hset('status', curr_job, 'complete')\n print('Job', curr_job, 'Completed')\n except ValueError:\n r.hset('status', curr_job, 'abort')\n print('Job', curr_job, 'Aborted')\n except TimeoutError:\n r.hset('status', curr_job, 'timeout')\n print('Job', curr_job, 'Timed Out')\n return\n\n\n<function token>\n",
"<import token>\n<function token>\n<function token>\n"
] | false |
99,029 |
fd1e6719c073a96d52746f328019c23653ed902c
|
from django.urls import path
from . import views
app_name = 'baixa'
urlpatterns = [
path('log/', views.BaixaView.as_view(), name='log'),
path('produto/<int:pk>/',views.baixa_produto, name='baixa'),
]
|
[
"from django.urls import path\nfrom . import views\n\napp_name = 'baixa'\nurlpatterns = [\npath('log/', views.BaixaView.as_view(), name='log'),\npath('produto/<int:pk>/',views.baixa_produto, name='baixa'),\n]",
"from django.urls import path\nfrom . import views\napp_name = 'baixa'\nurlpatterns = [path('log/', views.BaixaView.as_view(), name='log'), path(\n 'produto/<int:pk>/', views.baixa_produto, name='baixa')]\n",
"<import token>\napp_name = 'baixa'\nurlpatterns = [path('log/', views.BaixaView.as_view(), name='log'), path(\n 'produto/<int:pk>/', views.baixa_produto, name='baixa')]\n",
"<import token>\n<assignment token>\n"
] | false |
99,030 |
e5b45dbfd4007f7520cb169278e65c78f3186cb1
|
#!/usr/bin/env python
import logging
import socket
from pipeline import Pipeline
from inputs import FileInput, ZeroMQInput, StdInput
from parsers import RegexParser
from filters import ZuluDateFilter, RemoveFieldsFilter, GrepFilter, LCFilter, UniqFilter, AddFieldsFilter
from outputs import STDOutput, JSONOutput, SOLROutput, ZeroMQOutput
from dirwatcher import DirWatcher
logging.basicConfig(filename='./debug.log', level=logging.INFO, format='%(asctime)s:%(levelname)s:%(message)s')
if __name__ == "__main__":
zmq_in = ZeroMQInput()
p = RegexParser(use = ['apachelog'])
gf = GrepFilter(fields=['uri'],regex='health_check_status', reverse=True)
rff = RemoveFieldsFilter(fields = ['msg'])
zdf = ZuluDateFilter(fields=['date'],informat="%d/%b/%Y:%H:%M:%S")
uniq = UniqFilter()
solr_typemap = { 'date' : '_dt',
'hostname' : '_ti',
'client_ip' : '_ti',
'uri' : '_tp',
'server' : '_s',
'file' : '_tp',
'serve_time' : '_l', }
solr = SOLROutput('http://localhost:8080/solr/medley',
commitrate=1000, typemap=solr_typemap )
pipeline = Pipeline(pipes = [zmq_in,p,gf,rff,zdf,uniq,solr])
for data in pipeline:
pass
|
[
"#!/usr/bin/env python\nimport logging\nimport socket\nfrom pipeline import Pipeline\nfrom inputs import FileInput, ZeroMQInput, StdInput\nfrom parsers import RegexParser \nfrom filters import ZuluDateFilter, RemoveFieldsFilter, GrepFilter, LCFilter, UniqFilter, AddFieldsFilter\nfrom outputs import STDOutput, JSONOutput, SOLROutput, ZeroMQOutput\nfrom dirwatcher import DirWatcher\n\nlogging.basicConfig(filename='./debug.log', level=logging.INFO, format='%(asctime)s:%(levelname)s:%(message)s')\n\nif __name__ == \"__main__\":\n\n zmq_in = ZeroMQInput()\n p = RegexParser(use = ['apachelog']) \n gf = GrepFilter(fields=['uri'],regex='health_check_status', reverse=True)\n rff = RemoveFieldsFilter(fields = ['msg'])\n zdf = ZuluDateFilter(fields=['date'],informat=\"%d/%b/%Y:%H:%M:%S\")\n uniq = UniqFilter()\n solr_typemap = { 'date' : '_dt',\n 'hostname' : '_ti',\n 'client_ip' : '_ti',\n 'uri' : '_tp',\n 'server' : '_s',\n 'file' : '_tp',\n 'serve_time' : '_l', }\n\n solr = SOLROutput('http://localhost:8080/solr/medley',\n commitrate=1000, typemap=solr_typemap )\n\n pipeline = Pipeline(pipes = [zmq_in,p,gf,rff,zdf,uniq,solr])\n for data in pipeline:\n pass \n",
"import logging\nimport socket\nfrom pipeline import Pipeline\nfrom inputs import FileInput, ZeroMQInput, StdInput\nfrom parsers import RegexParser\nfrom filters import ZuluDateFilter, RemoveFieldsFilter, GrepFilter, LCFilter, UniqFilter, AddFieldsFilter\nfrom outputs import STDOutput, JSONOutput, SOLROutput, ZeroMQOutput\nfrom dirwatcher import DirWatcher\nlogging.basicConfig(filename='./debug.log', level=logging.INFO, format=\n '%(asctime)s:%(levelname)s:%(message)s')\nif __name__ == '__main__':\n zmq_in = ZeroMQInput()\n p = RegexParser(use=['apachelog'])\n gf = GrepFilter(fields=['uri'], regex='health_check_status', reverse=True)\n rff = RemoveFieldsFilter(fields=['msg'])\n zdf = ZuluDateFilter(fields=['date'], informat='%d/%b/%Y:%H:%M:%S')\n uniq = UniqFilter()\n solr_typemap = {'date': '_dt', 'hostname': '_ti', 'client_ip': '_ti',\n 'uri': '_tp', 'server': '_s', 'file': '_tp', 'serve_time': '_l'}\n solr = SOLROutput('http://localhost:8080/solr/medley', commitrate=1000,\n typemap=solr_typemap)\n pipeline = Pipeline(pipes=[zmq_in, p, gf, rff, zdf, uniq, solr])\n for data in pipeline:\n pass\n",
"<import token>\nlogging.basicConfig(filename='./debug.log', level=logging.INFO, format=\n '%(asctime)s:%(levelname)s:%(message)s')\nif __name__ == '__main__':\n zmq_in = ZeroMQInput()\n p = RegexParser(use=['apachelog'])\n gf = GrepFilter(fields=['uri'], regex='health_check_status', reverse=True)\n rff = RemoveFieldsFilter(fields=['msg'])\n zdf = ZuluDateFilter(fields=['date'], informat='%d/%b/%Y:%H:%M:%S')\n uniq = UniqFilter()\n solr_typemap = {'date': '_dt', 'hostname': '_ti', 'client_ip': '_ti',\n 'uri': '_tp', 'server': '_s', 'file': '_tp', 'serve_time': '_l'}\n solr = SOLROutput('http://localhost:8080/solr/medley', commitrate=1000,\n typemap=solr_typemap)\n pipeline = Pipeline(pipes=[zmq_in, p, gf, rff, zdf, uniq, solr])\n for data in pipeline:\n pass\n",
"<import token>\n<code token>\n"
] | false |
99,031 |
ac0f34e63ce3f1e37859d91ccd05a9e9fd9c4313
|
# -*- coding: utf-8 -*-
import requests
import json
from colored import fg, attr
from PyInquirer import style_from_dict, Token, prompt
from PyInquirer import Validator, ValidationError
import regex
import mysql.connector
from mysql.connector import errorcode
from .cmdb_data_model import cmdb_data_model
"""
Color definition.
"""
blue = fg('#46B1C9')
red = fg('#B54653')
green = fg('#86DEB7')
reset = attr('reset')
style = style_from_dict({
Token.QuestionMark: '#B54653 bold',
Token.Selected: '#86DEB7 bold',
Token.Instruction: '', # default
Token.Answer: '#46B1C9 bold',
Token.Question: '',
})
class NotEmpty(Validator):
def validate(self, document):
ok = document.text != "" and document.text != None
if not ok:
raise ValidationError(
message='Please enter something',
cursor_position=len(document.text)) # Move cursor to end
class AddressValidator(Validator):
def validate(self, document):
ok = regex.match(
r'(\d{1,3}\.){3}\d{1,3}', document.text)
if not ok:
raise ValidationError(
message='Please enter a valid IP address.',
cursor_position=len(document.text)) # Move cursor to end
def db_specification():
"""
Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.
Returns
-------
dict
The database information (server address, username, password and database name).
"""
db_specification_question = [
{
'type': 'input',
'message': 'Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where \'y\' is optional):',
'name': 'server',
'validate': AddressValidator
},
{
'type': 'input',
'message': 'Enter your database name:',
'name': 'db_name',
'validate': NotEmpty
},
{
'type': 'input',
'message': 'Enter your database username:',
'name': 'username',
'validate': NotEmpty
},
{
'type': 'password',
'message': 'Enter your database password:',
'name': 'password'
}
]
db_specification_answer = prompt(db_specification_question, style=style)
return db_specification_answer
def test_db_connection(server, db_name, username, passwd):
"""
Tests the access to the CMDB database.
Parameters
----------
server : string
The IP address of the CMDB server.
db_name: string
The CMDB database name.
username : string
The CMDB username.
password : string
The CMDB password.
Returns
-------
boolean
Returns true if the connection was successful and false otherwise.
"""
print(blue + "\n>>> " + reset + "Checking i-doit database connection...")
cnx = None
try:
cnx = mysql.connector.connect(
user=username, password=passwd, host=server, database=db_name)
print(green + "\n>>> " + reset +
"Successfully connected to the i-doit database.")
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print(red + "\n>>> " + reset +
"Something is wrong with your username or password.")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print(red + "\n>>> " + reset + "Database does not exist.")
else:
print(red + "\n>>> " + reset + str(err))
return cnx
def api_specification():
"""
Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.
Returns
-------
dict
The CMDB information (server address, username, password and api key).
"""
api_specification_question = [
{
'type': 'input',
'message': 'Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where \'y\' is optional):',
'name': 'server',
'validate': AddressValidator
},
{
'type': 'input',
'message': 'Enter your CMDB username:',
'name': 'username',
'validate': NotEmpty
},
{
'type': 'password',
'message': 'Enter your CMDB password:',
'name': 'password'
},
{
'type': 'input',
'message': 'Enter your API key:',
'name': 'api_key',
'validate': NotEmpty
}
]
api_specification_answer = prompt(api_specification_question, style=style)
return api_specification_answer
def test_api_connection(server, username, password, api_key):
"""
Tests the access to the CMDB.
Parameters
----------
server : string
The IP address of the CMDB server.
username : string
The CMDB username.
password : string
The CMDB password.
api_key: string
The CMDB API key.
Returns
-------
boolean
Returns true if the connection was successful and false otherwise.
"""
global api_url
api_url = "http://" + server + "/i-doit/src/jsonrpc.php"
global headers
headers = {}
headers["Content-Type"] = "application/json"
headers["X-RPC-Auth-Username"] = username
headers["X-RPC-Auth-Password"] = password
global apikey
apikey = api_key
print(blue + "\n>>> " + reset + "Checking API connection...")
login_body = json.loads("{\"version\": \"2.0\",\"method\": \"idoit.login\",\"params\": {\"apikey\": \"" +
apikey + "\",\"language\": \"en\"},\"id\": 1}")
try:
s = requests.Session()
login_request = s.post(api_url, json=login_body, headers=headers)
login = login_request.json()
if "error" in login:
print(red + "\n>>> " + reset +
"Unable to connect to the API. Please verify the connection information.")
return False
else:
print(green + "\n>>> " + reset + "Successfully connected.")
return True
except requests.exceptions.RequestException:
print(red + "\n>>> " + reset +
"Unable to connect to the API. Please verify the connection information.")
return False
def api_constants():
"""
Executes the method 'idoit.contants' of the i-doit API.
Gets the configuration item types, relationship types, and categories present in the CMDB.
Returns
-------
boolean
Returns the result of the execution of the method.
"""
constants_body = json.loads("{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"" +
apikey + "\",\"language\": \"en\"},\"id\": 1}")
try:
s = requests.Session()
constants_request = s.post(
api_url, json=constants_body, headers=headers)
constants = constants_request.json()
return constants.get("result")
except requests.exceptions.RequestException:
print(red + "\n>>> " + reset +
"Unable to connect to the API. Please verify the connection information.\n")
return None
def get_dialogs_from_table(table, db, cursor):
values = {}
if table != None:
name = str(table) + "__id"
desc = str(table) + "__title"
query = ("SELECT " + name + ", " + desc +
" FROM " + db + "." + table + ";")
cursor.execute(query)
for t in cursor:
name, value = t
values[name] = value
return values
def api_category_info(category, db_info, connection):
"""
Executes the method 'cmdb.category_info' of the i-doit API for a given category.
Gets the attributes associated with a category, its data types and the available values of the dialog type attributes.
Parameters
----------
category : string
The category name.
Returns
-------
dict
Returns the attributes, its data types and the available values of the dialog type attributes associated with the category.
"""
res = {}
attributes = []
types = {}
dialogs = {}
cat_body = json.loads("{\"version\": \"2.0\",\"method\": \"cmdb.category_info\",\"params\": {\"category\": \"" +
category + "\", \"apikey\": \"" + apikey + "\",\"language\": \"en\"},\"id\": 1}")
server = db_info.get("server")
username = db_info.get("username")
password = db_info.get("password")
db_name = db_info.get("db_name")
try:
s = requests.Session()
cat_request = s.post(api_url, json=cat_body, headers=headers)
if cat_request.text != "":
if "result" in cat_request.json():
for attr in cat_request.json()["result"]:
new_atr = {}
new_atr[cat_request.json()["result"][attr]["title"]] = attr
types[cat_request.json()["result"][attr]["title"]] = cat_request.json()[
"result"][attr]["data"]["type"]
dialog = cat_request.json().get("result").get(attr).get("info").get("type")
d = {}
if dialog == "dialog":
dialog_body = json.loads("{\"version\": \"2.0\",\"method\": \"cmdb.dialog.read\",\"params\": {\"category\": \"" +
category + "\", \"property\": \"" + attr + "\", \"apikey\": \"" + apikey + "\",\"language\": \"en\"},\"id\": 1}")
s = requests.Session()
dialog_request = s.post(
api_url, json=dialog_body, headers=headers)
if dialog_request.text != "":
values = dialog_request.json().get("result")
if values != None:
if len(values) == 1:
values = values[0]
if values != None:
for a in values:
if type(a) is dict:
value = a.get("id")
name = a.get("title")
d[value] = name
elif dialog == "dialog_plus":
cursor = connection.cursor()
table = cat_request.json().get("result").get(attr).get(
"data").get("sourceTable")
values = get_dialogs_from_table(table, db_name, cursor)
if len(d) > 0:
dialogs[attr] = d
attributes.append(new_atr)
res["attributes"] = attributes
res["types"] = types
res["dialogs"] = dialogs
return res
except requests.exceptions.RequestException:
print(red + "\n>>> " + reset +
"Unable to connect to the API. Please verify the connection information.\n")
return None
def category_attributes_types(categories, db_info, connection):
"""
Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.
Parameters
----------
categories : list
The category names.
Returns
-------
dict
Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.
"""
attributes = {}
for cat in categories:
attributes[cat] = {}
category_info = api_category_info(cat, db_info, connection)
attr = {}
for a in category_info.get("attributes"):
for key in a:
attr[key] = a[key]
attributes[cat]["attributes"] = {k: d for d, k in attr.items()}
types = category_info.get("types")
attributes[cat]["types"] = {
attr.get(a): types.get(a) for a in types}
attributes[cat]["dialogs"] = category_info.get("dialogs")
return attributes
def get_object_attributes(ci, cat_attr_types):
"""
Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.
Gets the categories associated with an object type.
Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.
Parameters
----------
ci : string
The object name.
cat_attr_types : dict
The attributes, its data types and the available values of the dialog type attributes, associated with every category, .
Returns
-------
dict
Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.
"""
res = {}
object_attributes = {}
attributes_types = {}
dialogs = {}
obj_categories_body = json.loads("{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"" +
ci + "\", \"apikey\": \"" + apikey + "\",\"language\": \"en\"},\"id\": 1}")
try:
s = requests.Session()
obj_categories_request = s.post(
api_url, json=obj_categories_body, headers=headers)
if obj_categories_request.text != "":
if "result" in obj_categories_request.json():
if "catg" in obj_categories_request.json()["result"]:
for cat_g in obj_categories_request.json()["result"]["catg"]:
cat = cat_g["const"]
if cat in cat_attr_types:
dialogs.update(
cat_attr_types.get(cat).get("dialogs"))
attrs = cat_attr_types.get(cat).get("attributes")
types = cat_attr_types.get(cat).get("types")
object_attributes.update(attrs)
attributes_types.update(types)
if "cats" in obj_categories_request.json()["result"]:
for cat_s in obj_categories_request.json()["result"]["cats"]:
cat = cat_s["const"]
if cat in cat_attr_types:
dialogs.update(
cat_attr_types.get(cat).get("dialogs"))
attrs = cat_attr_types.get(cat).get("attributes")
types = cat_attr_types.get(cat).get("types")
object_attributes.update(attrs)
attributes_types.update(types)
res["dialogs"] = dialogs
res["attributes"] = object_attributes
res["types"] = attributes_types
return res
except requests.exceptions.RequestException:
print(red + "\n>>> " + reset +
"Unable to connect to the API. Please verify the connection information.\n")
return None
def process_i_doit():
"""
Processes the i-doit CMDB data model, obtaining information about configuration item types,
relationship types, configuration items and relationship attributes, restrictions between relationships,
data types of attributes, and values for dialog type attributes.
Returns
-------
dict
Returns the CMDB information (server address, username, password and api key).
"""
print(blue + "\n>>> " + reset + "Make sure that i-doit is running.")
api_info = api_specification()
server = api_info.get("server")
username = api_info.get("username")
password = api_info.get("password")
api_key = api_info.get("api_key")
connection = test_api_connection(server, username, password, api_key)
if connection == False:
return process_i_doit()
else:
print(blue + "\n>>> " + reset + "Make sure that i-doit is running.\n")
db_info = db_specification()
server = db_info.get("server")
username = db_info.get("username")
password = db_info.get("password")
db_name = db_info.get("db_name")
connection = test_db_connection(server, db_name, username, password)
if connection == None:
return process_i_doit()
else:
print(blue + "\n>>> " + reset +
"Processing i-doit CMDB data model...")
constants = api_constants()
if constants == None:
process_i_doit()
else:
ci_types = constants.get("objectTypes")
cmdb_data_model["ci_types"] = ci_types
rel_types = constants.get("relationTypes")
cmdb_data_model["rel_types"] = rel_types
categories = [c for c in {
**constants.get("categories").get("g"), **constants.get("categories").get("s")}]
cat_attr_types = category_attributes_types(
categories, db_info, connection)
ci_attributes_types = {}
for ci in ci_types:
attrs = get_object_attributes(ci, cat_attr_types)
if attrs == None:
process_i_doit()
else:
ci_attributes_types[ci] = attrs
rel_attributes_types = {}
attrs = get_object_attributes(
"C__OBJTYPE__RELATION", cat_attr_types)
if attrs == None:
process_i_doit()
else:
for rel in rel_types:
rel_attributes_types[rel] = attrs
cmdb_data_model["ci_attributes"] = {
ci: ci_attributes_types[ci]["attributes"] for ci in ci_attributes_types}
cmdb_data_model["ci_attributes_data_types"] = {
ci: ci_attributes_types[ci]["types"] for ci in ci_attributes_types}
cmdb_data_model["ci_dialog_attributes"] = {
ci: ci_attributes_types[ci]["dialogs"] for ci in ci_attributes_types}
cmdb_data_model["rel_attributes"] = {
rel: rel_attributes_types[rel]["attributes"] for rel in rel_attributes_types}
cmdb_data_model["rel_attributes_data_types"] = {
rel: rel_attributes_types[rel]["types"] for rel in rel_attributes_types}
return api_info
|
[
"# -*- coding: utf-8 -*-\n\nimport requests\nimport json\nfrom colored import fg, attr\nfrom PyInquirer import style_from_dict, Token, prompt\nfrom PyInquirer import Validator, ValidationError\nimport regex\nimport mysql.connector\nfrom mysql.connector import errorcode\n\nfrom .cmdb_data_model import cmdb_data_model\n\n\"\"\"\n Color definition.\n\"\"\"\nblue = fg('#46B1C9')\nred = fg('#B54653')\ngreen = fg('#86DEB7')\nreset = attr('reset')\n\nstyle = style_from_dict({\n Token.QuestionMark: '#B54653 bold',\n Token.Selected: '#86DEB7 bold',\n Token.Instruction: '', # default\n Token.Answer: '#46B1C9 bold',\n Token.Question: '',\n})\n\n\nclass NotEmpty(Validator):\n def validate(self, document):\n ok = document.text != \"\" and document.text != None\n if not ok:\n raise ValidationError(\n message='Please enter something',\n cursor_position=len(document.text)) # Move cursor to end\n\n\nclass AddressValidator(Validator):\n def validate(self, document):\n ok = regex.match(\n r'(\\d{1,3}\\.){3}\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(\n message='Please enter a valid IP address.',\n cursor_position=len(document.text)) # Move cursor to end\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [\n {\n 'type': 'input',\n 'message': 'Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where \\'y\\' is optional):',\n 'name': 'server',\n 'validate': AddressValidator\n },\n {\n 'type': 'input',\n 'message': 'Enter your database name:',\n 'name': 'db_name',\n 'validate': NotEmpty\n },\n {\n 'type': 'input',\n 'message': 'Enter your database username:',\n 'name': 'username',\n 'validate': NotEmpty\n },\n {\n 'type': 'password',\n 'message': 'Enter your database password:',\n 'name': 'password'\n }\n ]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\ndef test_db_connection(server, db_name, username, passwd):\n \"\"\"\n Tests the access to the CMDB database.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n db_name: string\n The CMDB database name.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n\n \"\"\"\n print(blue + \"\\n>>> \" + reset + \"Checking i-doit database connection...\")\n cnx = None\n try:\n cnx = mysql.connector.connect(\n user=username, password=passwd, host=server, database=db_name)\n print(green + \"\\n>>> \" + reset +\n \"Successfully connected to the i-doit database.\")\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:\n print(red + \"\\n>>> \" + reset +\n \"Something is wrong with your username or password.\")\n elif err.errno == errorcode.ER_BAD_DB_ERROR:\n print(red + \"\\n>>> \" + reset + \"Database does not exist.\")\n else:\n print(red + \"\\n>>> \" + reset + str(err))\n return cnx\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [\n {\n 'type': 'input',\n 'message': 'Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where \\'y\\' is optional):',\n 'name': 'server',\n 'validate': AddressValidator\n },\n {\n 'type': 'input',\n 'message': 'Enter your CMDB username:',\n 'name': 'username',\n 'validate': NotEmpty\n },\n {\n 'type': 'password',\n 'message': 'Enter your CMDB password:',\n 'name': 'password'\n },\n {\n 'type': 'input',\n 'message': 'Enter your API key:',\n 'name': 'api_key',\n 'validate': NotEmpty\n }\n ]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\ndef test_api_connection(server, username, password, api_key):\n \"\"\"\n Tests the access to the CMDB.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n api_key: string\n The CMDB API key.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n \"\"\"\n global api_url\n api_url = \"http://\" + server + \"/i-doit/src/jsonrpc.php\"\n\n global headers\n headers = {}\n headers[\"Content-Type\"] = \"application/json\"\n headers[\"X-RPC-Auth-Username\"] = username\n headers[\"X-RPC-Auth-Password\"] = password\n\n global apikey\n apikey = api_key\n\n print(blue + \"\\n>>> \" + reset + \"Checking API connection...\")\n\n login_body = json.loads(\"{\\\"version\\\": \\\"2.0\\\",\\\"method\\\": \\\"idoit.login\\\",\\\"params\\\": {\\\"apikey\\\": \\\"\" +\n apikey + \"\\\",\\\"language\\\": \\\"en\\\"},\\\"id\\\": 1}\")\n\n try:\n s = requests.Session()\n login_request = s.post(api_url, json=login_body, headers=headers)\n login = login_request.json()\n if \"error\" in login:\n print(red + \"\\n>>> \" + reset +\n \"Unable to connect to the API. Please verify the connection information.\")\n return False\n else:\n print(green + \"\\n>>> \" + reset + \"Successfully connected.\")\n return True\n except requests.exceptions.RequestException:\n print(red + \"\\n>>> \" + reset +\n \"Unable to connect to the API. Please verify the connection information.\")\n return False\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\"{\\\"version\\\": \\\"2.0\\\",\\\"method\\\": \\\"idoit.constants\\\",\\\"params\\\": {\\\"apikey\\\": \\\"\" +\n apikey + \"\\\",\\\"language\\\": \\\"en\\\"},\\\"id\\\": 1}\")\n try:\n s = requests.Session()\n constants_request = s.post(\n api_url, json=constants_body, headers=headers)\n constants = constants_request.json()\n return constants.get(\"result\")\n except requests.exceptions.RequestException:\n print(red + \"\\n>>> \" + reset +\n \"Unable to connect to the API. Please verify the connection information.\\n\")\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + \"__id\"\n desc = str(table) + \"__title\"\n query = (\"SELECT \" + name + \", \" + desc +\n \" FROM \" + db + \".\" + table + \";\")\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n\n return values\n\n\ndef api_category_info(category, db_info, connection):\n \"\"\"\n Executes the method 'cmdb.category_info' of the i-doit API for a given category.\n Gets the attributes associated with a category, its data types and the available values of the dialog type attributes.\n\n Parameters\n ----------\n category : string\n The category name.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the category.\n \"\"\"\n res = {}\n attributes = []\n types = {}\n dialogs = {}\n cat_body = json.loads(\"{\\\"version\\\": \\\"2.0\\\",\\\"method\\\": \\\"cmdb.category_info\\\",\\\"params\\\": {\\\"category\\\": \\\"\" +\n category + \"\\\", \\\"apikey\\\": \\\"\" + apikey + \"\\\",\\\"language\\\": \\\"en\\\"},\\\"id\\\": 1}\")\n\n server = db_info.get(\"server\")\n username = db_info.get(\"username\")\n password = db_info.get(\"password\")\n db_name = db_info.get(\"db_name\")\n\n try:\n s = requests.Session()\n cat_request = s.post(api_url, json=cat_body, headers=headers)\n if cat_request.text != \"\":\n if \"result\" in cat_request.json():\n for attr in cat_request.json()[\"result\"]:\n new_atr = {}\n new_atr[cat_request.json()[\"result\"][attr][\"title\"]] = attr\n types[cat_request.json()[\"result\"][attr][\"title\"]] = cat_request.json()[\n \"result\"][attr][\"data\"][\"type\"]\n dialog = cat_request.json().get(\"result\").get(attr).get(\"info\").get(\"type\")\n\n d = {}\n\n if dialog == \"dialog\":\n\n dialog_body = json.loads(\"{\\\"version\\\": \\\"2.0\\\",\\\"method\\\": \\\"cmdb.dialog.read\\\",\\\"params\\\": {\\\"category\\\": \\\"\" +\n category + \"\\\", \\\"property\\\": \\\"\" + attr + \"\\\", \\\"apikey\\\": \\\"\" + apikey + \"\\\",\\\"language\\\": \\\"en\\\"},\\\"id\\\": 1}\")\n s = requests.Session()\n dialog_request = s.post(\n api_url, json=dialog_body, headers=headers)\n if dialog_request.text != \"\":\n values = dialog_request.json().get(\"result\")\n if values != None:\n if len(values) == 1:\n values = values[0]\n if values != None:\n for a in values:\n if type(a) is dict:\n value = a.get(\"id\")\n name = a.get(\"title\")\n d[value] = name\n\n elif dialog == \"dialog_plus\":\n cursor = connection.cursor()\n\n table = cat_request.json().get(\"result\").get(attr).get(\n \"data\").get(\"sourceTable\")\n\n values = get_dialogs_from_table(table, db_name, cursor)\n\n if len(d) > 0:\n dialogs[attr] = d\n\n attributes.append(new_atr)\n res[\"attributes\"] = attributes\n res[\"types\"] = types\n res[\"dialogs\"] = dialogs\n return res\n except requests.exceptions.RequestException:\n print(red + \"\\n>>> \" + reset +\n \"Unable to connect to the API. Please verify the connection information.\\n\")\n return None\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n\n attr = {}\n for a in category_info.get(\"attributes\"):\n for key in a:\n attr[key] = a[key]\n\n attributes[cat][\"attributes\"] = {k: d for d, k in attr.items()}\n\n types = category_info.get(\"types\")\n attributes[cat][\"types\"] = {\n attr.get(a): types.get(a) for a in types}\n\n attributes[cat][\"dialogs\"] = category_info.get(\"dialogs\")\n\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\"{\\\"version\\\": \\\"2.0\\\",\\\"method\\\": \\\"cmdb.object_type_categories.read\\\",\\\"params\\\": {\\\"type\\\": \\\"\" +\n ci + \"\\\", \\\"apikey\\\": \\\"\" + apikey + \"\\\",\\\"language\\\": \\\"en\\\"},\\\"id\\\": 1}\")\n try:\n s = requests.Session()\n obj_categories_request = s.post(\n api_url, json=obj_categories_body, headers=headers)\n\n if obj_categories_request.text != \"\":\n if \"result\" in obj_categories_request.json():\n if \"catg\" in obj_categories_request.json()[\"result\"]:\n for cat_g in obj_categories_request.json()[\"result\"][\"catg\"]:\n cat = cat_g[\"const\"]\n if cat in cat_attr_types:\n dialogs.update(\n cat_attr_types.get(cat).get(\"dialogs\"))\n attrs = cat_attr_types.get(cat).get(\"attributes\")\n types = cat_attr_types.get(cat).get(\"types\")\n object_attributes.update(attrs)\n attributes_types.update(types)\n if \"cats\" in obj_categories_request.json()[\"result\"]:\n for cat_s in obj_categories_request.json()[\"result\"][\"cats\"]:\n cat = cat_s[\"const\"]\n if cat in cat_attr_types:\n dialogs.update(\n cat_attr_types.get(cat).get(\"dialogs\"))\n attrs = cat_attr_types.get(cat).get(\"attributes\")\n types = cat_attr_types.get(cat).get(\"types\")\n object_attributes.update(attrs)\n attributes_types.update(types)\n res[\"dialogs\"] = dialogs\n res[\"attributes\"] = object_attributes\n res[\"types\"] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + \"\\n>>> \" + reset +\n \"Unable to connect to the API. Please verify the connection information.\\n\")\n return None\n\n\ndef process_i_doit():\n \"\"\"\n Processes the i-doit CMDB data model, obtaining information about configuration item types, \n relationship types, configuration items and relationship attributes, restrictions between relationships, \n data types of attributes, and values for dialog type attributes.\n\n Returns\n -------\n dict\n Returns the CMDB information (server address, username, password and api key).\n \"\"\"\n print(blue + \"\\n>>> \" + reset + \"Make sure that i-doit is running.\")\n api_info = api_specification()\n\n server = api_info.get(\"server\")\n username = api_info.get(\"username\")\n password = api_info.get(\"password\")\n api_key = api_info.get(\"api_key\")\n\n connection = test_api_connection(server, username, password, api_key)\n if connection == False:\n return process_i_doit()\n else:\n print(blue + \"\\n>>> \" + reset + \"Make sure that i-doit is running.\\n\")\n db_info = db_specification()\n server = db_info.get(\"server\")\n username = db_info.get(\"username\")\n password = db_info.get(\"password\")\n db_name = db_info.get(\"db_name\")\n\n connection = test_db_connection(server, db_name, username, password)\n if connection == None:\n return process_i_doit()\n\n else:\n\n print(blue + \"\\n>>> \" + reset +\n \"Processing i-doit CMDB data model...\")\n constants = api_constants()\n\n if constants == None:\n process_i_doit()\n else:\n ci_types = constants.get(\"objectTypes\")\n cmdb_data_model[\"ci_types\"] = ci_types\n rel_types = constants.get(\"relationTypes\")\n cmdb_data_model[\"rel_types\"] = rel_types\n\n categories = [c for c in {\n **constants.get(\"categories\").get(\"g\"), **constants.get(\"categories\").get(\"s\")}]\n cat_attr_types = category_attributes_types(\n categories, db_info, connection)\n\n ci_attributes_types = {}\n\n for ci in ci_types:\n attrs = get_object_attributes(ci, cat_attr_types)\n if attrs == None:\n process_i_doit()\n else:\n ci_attributes_types[ci] = attrs\n\n rel_attributes_types = {}\n\n attrs = get_object_attributes(\n \"C__OBJTYPE__RELATION\", cat_attr_types)\n\n if attrs == None:\n process_i_doit()\n else:\n for rel in rel_types:\n rel_attributes_types[rel] = attrs\n\n cmdb_data_model[\"ci_attributes\"] = {\n ci: ci_attributes_types[ci][\"attributes\"] for ci in ci_attributes_types}\n\n cmdb_data_model[\"ci_attributes_data_types\"] = {\n ci: ci_attributes_types[ci][\"types\"] for ci in ci_attributes_types}\n\n cmdb_data_model[\"ci_dialog_attributes\"] = {\n ci: ci_attributes_types[ci][\"dialogs\"] for ci in ci_attributes_types}\n\n cmdb_data_model[\"rel_attributes\"] = {\n rel: rel_attributes_types[rel][\"attributes\"] for rel in rel_attributes_types}\n\n cmdb_data_model[\"rel_attributes_data_types\"] = {\n rel: rel_attributes_types[rel][\"types\"] for rel in rel_attributes_types}\n\n return api_info\n",
"import requests\nimport json\nfrom colored import fg, attr\nfrom PyInquirer import style_from_dict, Token, prompt\nfrom PyInquirer import Validator, ValidationError\nimport regex\nimport mysql.connector\nfrom mysql.connector import errorcode\nfrom .cmdb_data_model import cmdb_data_model\n<docstring token>\nblue = fg('#46B1C9')\nred = fg('#B54653')\ngreen = fg('#86DEB7')\nreset = attr('reset')\nstyle = style_from_dict({Token.QuestionMark: '#B54653 bold', Token.Selected:\n '#86DEB7 bold', Token.Instruction: '', Token.Answer: '#46B1C9 bold',\n Token.Question: ''})\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\ndef test_db_connection(server, db_name, username, passwd):\n \"\"\"\n Tests the access to the CMDB database.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n db_name: string\n The CMDB database name.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Checking i-doit database connection...')\n cnx = None\n try:\n cnx = mysql.connector.connect(user=username, password=passwd, host=\n server, database=db_name)\n print(green + '\\n>>> ' + reset +\n 'Successfully connected to the i-doit database.')\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:\n print(red + '\\n>>> ' + reset +\n 'Something is wrong with your username or password.')\n elif err.errno == errorcode.ER_BAD_DB_ERROR:\n print(red + '\\n>>> ' + reset + 'Database does not exist.')\n else:\n print(red + '\\n>>> ' + reset + str(err))\n return cnx\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\ndef test_api_connection(server, username, password, api_key):\n \"\"\"\n Tests the access to the CMDB.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n api_key: string\n The CMDB API key.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n \"\"\"\n global api_url\n api_url = 'http://' + server + '/i-doit/src/jsonrpc.php'\n global headers\n headers = {}\n headers['Content-Type'] = 'application/json'\n headers['X-RPC-Auth-Username'] = username\n headers['X-RPC-Auth-Password'] = password\n global apikey\n apikey = api_key\n print(blue + '\\n>>> ' + reset + 'Checking API connection...')\n login_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.login\",\"params\": {\"apikey\": \"' +\n apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n login_request = s.post(api_url, json=login_body, headers=headers)\n login = login_request.json()\n if 'error' in login:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n else:\n print(green + '\\n>>> ' + reset + 'Successfully connected.')\n return True\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\ndef api_category_info(category, db_info, connection):\n \"\"\"\n Executes the method 'cmdb.category_info' of the i-doit API for a given category.\n Gets the attributes associated with a category, its data types and the available values of the dialog type attributes.\n\n Parameters\n ----------\n category : string\n The category name.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the category.\n \"\"\"\n res = {}\n attributes = []\n types = {}\n dialogs = {}\n cat_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.category_info\",\"params\": {\"category\": \"'\n + category + '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n server = db_info.get('server')\n username = db_info.get('username')\n password = db_info.get('password')\n db_name = db_info.get('db_name')\n try:\n s = requests.Session()\n cat_request = s.post(api_url, json=cat_body, headers=headers)\n if cat_request.text != '':\n if 'result' in cat_request.json():\n for attr in cat_request.json()['result']:\n new_atr = {}\n new_atr[cat_request.json()['result'][attr]['title']] = attr\n types[cat_request.json()['result'][attr]['title']\n ] = cat_request.json()['result'][attr]['data']['type']\n dialog = cat_request.json().get('result').get(attr).get(\n 'info').get('type')\n d = {}\n if dialog == 'dialog':\n dialog_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.dialog.read\",\"params\": {\"category\": \"'\n + category + '\", \"property\": \"' + attr +\n '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n s = requests.Session()\n dialog_request = s.post(api_url, json=dialog_body,\n headers=headers)\n if dialog_request.text != '':\n values = dialog_request.json().get('result')\n if values != None:\n if len(values) == 1:\n values = values[0]\n if values != None:\n for a in values:\n if type(a) is dict:\n value = a.get('id')\n name = a.get('title')\n d[value] = name\n elif dialog == 'dialog_plus':\n cursor = connection.cursor()\n table = cat_request.json().get('result').get(attr).get(\n 'data').get('sourceTable')\n values = get_dialogs_from_table(table, db_name, cursor)\n if len(d) > 0:\n dialogs[attr] = d\n attributes.append(new_atr)\n res['attributes'] = attributes\n res['types'] = types\n res['dialogs'] = dialogs\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n attr = {}\n for a in category_info.get('attributes'):\n for key in a:\n attr[key] = a[key]\n attributes[cat]['attributes'] = {k: d for d, k in attr.items()}\n types = category_info.get('types')\n attributes[cat]['types'] = {attr.get(a): types.get(a) for a in types}\n attributes[cat]['dialogs'] = category_info.get('dialogs')\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef process_i_doit():\n \"\"\"\n Processes the i-doit CMDB data model, obtaining information about configuration item types, \n relationship types, configuration items and relationship attributes, restrictions between relationships, \n data types of attributes, and values for dialog type attributes.\n\n Returns\n -------\n dict\n Returns the CMDB information (server address, username, password and api key).\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Make sure that i-doit is running.')\n api_info = api_specification()\n server = api_info.get('server')\n username = api_info.get('username')\n password = api_info.get('password')\n api_key = api_info.get('api_key')\n connection = test_api_connection(server, username, password, api_key)\n if connection == False:\n return process_i_doit()\n else:\n print(blue + '\\n>>> ' + reset + 'Make sure that i-doit is running.\\n')\n db_info = db_specification()\n server = db_info.get('server')\n username = db_info.get('username')\n password = db_info.get('password')\n db_name = db_info.get('db_name')\n connection = test_db_connection(server, db_name, username, password)\n if connection == None:\n return process_i_doit()\n else:\n print(blue + '\\n>>> ' + reset +\n 'Processing i-doit CMDB data model...')\n constants = api_constants()\n if constants == None:\n process_i_doit()\n else:\n ci_types = constants.get('objectTypes')\n cmdb_data_model['ci_types'] = ci_types\n rel_types = constants.get('relationTypes')\n cmdb_data_model['rel_types'] = rel_types\n categories = [c for c in {**constants.get('categories').get\n ('g'), **constants.get('categories').get('s')}]\n cat_attr_types = category_attributes_types(categories,\n db_info, connection)\n ci_attributes_types = {}\n for ci in ci_types:\n attrs = get_object_attributes(ci, cat_attr_types)\n if attrs == None:\n process_i_doit()\n else:\n ci_attributes_types[ci] = attrs\n rel_attributes_types = {}\n attrs = get_object_attributes('C__OBJTYPE__RELATION',\n cat_attr_types)\n if attrs == None:\n process_i_doit()\n else:\n for rel in rel_types:\n rel_attributes_types[rel] = attrs\n cmdb_data_model['ci_attributes'] = {ci: ci_attributes_types\n [ci]['attributes'] for ci in ci_attributes_types}\n cmdb_data_model['ci_attributes_data_types'] = {ci:\n ci_attributes_types[ci]['types'] for ci in\n ci_attributes_types}\n cmdb_data_model['ci_dialog_attributes'] = {ci:\n ci_attributes_types[ci]['dialogs'] for ci in\n ci_attributes_types}\n cmdb_data_model['rel_attributes'] = {rel:\n rel_attributes_types[rel]['attributes'] for rel in\n rel_attributes_types}\n cmdb_data_model['rel_attributes_data_types'] = {rel:\n rel_attributes_types[rel]['types'] for rel in\n rel_attributes_types}\n return api_info\n",
"<import token>\n<docstring token>\nblue = fg('#46B1C9')\nred = fg('#B54653')\ngreen = fg('#86DEB7')\nreset = attr('reset')\nstyle = style_from_dict({Token.QuestionMark: '#B54653 bold', Token.Selected:\n '#86DEB7 bold', Token.Instruction: '', Token.Answer: '#46B1C9 bold',\n Token.Question: ''})\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\ndef test_db_connection(server, db_name, username, passwd):\n \"\"\"\n Tests the access to the CMDB database.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n db_name: string\n The CMDB database name.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Checking i-doit database connection...')\n cnx = None\n try:\n cnx = mysql.connector.connect(user=username, password=passwd, host=\n server, database=db_name)\n print(green + '\\n>>> ' + reset +\n 'Successfully connected to the i-doit database.')\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:\n print(red + '\\n>>> ' + reset +\n 'Something is wrong with your username or password.')\n elif err.errno == errorcode.ER_BAD_DB_ERROR:\n print(red + '\\n>>> ' + reset + 'Database does not exist.')\n else:\n print(red + '\\n>>> ' + reset + str(err))\n return cnx\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\ndef test_api_connection(server, username, password, api_key):\n \"\"\"\n Tests the access to the CMDB.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n api_key: string\n The CMDB API key.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n \"\"\"\n global api_url\n api_url = 'http://' + server + '/i-doit/src/jsonrpc.php'\n global headers\n headers = {}\n headers['Content-Type'] = 'application/json'\n headers['X-RPC-Auth-Username'] = username\n headers['X-RPC-Auth-Password'] = password\n global apikey\n apikey = api_key\n print(blue + '\\n>>> ' + reset + 'Checking API connection...')\n login_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.login\",\"params\": {\"apikey\": \"' +\n apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n login_request = s.post(api_url, json=login_body, headers=headers)\n login = login_request.json()\n if 'error' in login:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n else:\n print(green + '\\n>>> ' + reset + 'Successfully connected.')\n return True\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\ndef api_category_info(category, db_info, connection):\n \"\"\"\n Executes the method 'cmdb.category_info' of the i-doit API for a given category.\n Gets the attributes associated with a category, its data types and the available values of the dialog type attributes.\n\n Parameters\n ----------\n category : string\n The category name.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the category.\n \"\"\"\n res = {}\n attributes = []\n types = {}\n dialogs = {}\n cat_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.category_info\",\"params\": {\"category\": \"'\n + category + '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n server = db_info.get('server')\n username = db_info.get('username')\n password = db_info.get('password')\n db_name = db_info.get('db_name')\n try:\n s = requests.Session()\n cat_request = s.post(api_url, json=cat_body, headers=headers)\n if cat_request.text != '':\n if 'result' in cat_request.json():\n for attr in cat_request.json()['result']:\n new_atr = {}\n new_atr[cat_request.json()['result'][attr]['title']] = attr\n types[cat_request.json()['result'][attr]['title']\n ] = cat_request.json()['result'][attr]['data']['type']\n dialog = cat_request.json().get('result').get(attr).get(\n 'info').get('type')\n d = {}\n if dialog == 'dialog':\n dialog_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.dialog.read\",\"params\": {\"category\": \"'\n + category + '\", \"property\": \"' + attr +\n '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n s = requests.Session()\n dialog_request = s.post(api_url, json=dialog_body,\n headers=headers)\n if dialog_request.text != '':\n values = dialog_request.json().get('result')\n if values != None:\n if len(values) == 1:\n values = values[0]\n if values != None:\n for a in values:\n if type(a) is dict:\n value = a.get('id')\n name = a.get('title')\n d[value] = name\n elif dialog == 'dialog_plus':\n cursor = connection.cursor()\n table = cat_request.json().get('result').get(attr).get(\n 'data').get('sourceTable')\n values = get_dialogs_from_table(table, db_name, cursor)\n if len(d) > 0:\n dialogs[attr] = d\n attributes.append(new_atr)\n res['attributes'] = attributes\n res['types'] = types\n res['dialogs'] = dialogs\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n attr = {}\n for a in category_info.get('attributes'):\n for key in a:\n attr[key] = a[key]\n attributes[cat]['attributes'] = {k: d for d, k in attr.items()}\n types = category_info.get('types')\n attributes[cat]['types'] = {attr.get(a): types.get(a) for a in types}\n attributes[cat]['dialogs'] = category_info.get('dialogs')\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef process_i_doit():\n \"\"\"\n Processes the i-doit CMDB data model, obtaining information about configuration item types, \n relationship types, configuration items and relationship attributes, restrictions between relationships, \n data types of attributes, and values for dialog type attributes.\n\n Returns\n -------\n dict\n Returns the CMDB information (server address, username, password and api key).\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Make sure that i-doit is running.')\n api_info = api_specification()\n server = api_info.get('server')\n username = api_info.get('username')\n password = api_info.get('password')\n api_key = api_info.get('api_key')\n connection = test_api_connection(server, username, password, api_key)\n if connection == False:\n return process_i_doit()\n else:\n print(blue + '\\n>>> ' + reset + 'Make sure that i-doit is running.\\n')\n db_info = db_specification()\n server = db_info.get('server')\n username = db_info.get('username')\n password = db_info.get('password')\n db_name = db_info.get('db_name')\n connection = test_db_connection(server, db_name, username, password)\n if connection == None:\n return process_i_doit()\n else:\n print(blue + '\\n>>> ' + reset +\n 'Processing i-doit CMDB data model...')\n constants = api_constants()\n if constants == None:\n process_i_doit()\n else:\n ci_types = constants.get('objectTypes')\n cmdb_data_model['ci_types'] = ci_types\n rel_types = constants.get('relationTypes')\n cmdb_data_model['rel_types'] = rel_types\n categories = [c for c in {**constants.get('categories').get\n ('g'), **constants.get('categories').get('s')}]\n cat_attr_types = category_attributes_types(categories,\n db_info, connection)\n ci_attributes_types = {}\n for ci in ci_types:\n attrs = get_object_attributes(ci, cat_attr_types)\n if attrs == None:\n process_i_doit()\n else:\n ci_attributes_types[ci] = attrs\n rel_attributes_types = {}\n attrs = get_object_attributes('C__OBJTYPE__RELATION',\n cat_attr_types)\n if attrs == None:\n process_i_doit()\n else:\n for rel in rel_types:\n rel_attributes_types[rel] = attrs\n cmdb_data_model['ci_attributes'] = {ci: ci_attributes_types\n [ci]['attributes'] for ci in ci_attributes_types}\n cmdb_data_model['ci_attributes_data_types'] = {ci:\n ci_attributes_types[ci]['types'] for ci in\n ci_attributes_types}\n cmdb_data_model['ci_dialog_attributes'] = {ci:\n ci_attributes_types[ci]['dialogs'] for ci in\n ci_attributes_types}\n cmdb_data_model['rel_attributes'] = {rel:\n rel_attributes_types[rel]['attributes'] for rel in\n rel_attributes_types}\n cmdb_data_model['rel_attributes_data_types'] = {rel:\n rel_attributes_types[rel]['types'] for rel in\n rel_attributes_types}\n return api_info\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\ndef test_db_connection(server, db_name, username, passwd):\n \"\"\"\n Tests the access to the CMDB database.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n db_name: string\n The CMDB database name.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Checking i-doit database connection...')\n cnx = None\n try:\n cnx = mysql.connector.connect(user=username, password=passwd, host=\n server, database=db_name)\n print(green + '\\n>>> ' + reset +\n 'Successfully connected to the i-doit database.')\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:\n print(red + '\\n>>> ' + reset +\n 'Something is wrong with your username or password.')\n elif err.errno == errorcode.ER_BAD_DB_ERROR:\n print(red + '\\n>>> ' + reset + 'Database does not exist.')\n else:\n print(red + '\\n>>> ' + reset + str(err))\n return cnx\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\ndef test_api_connection(server, username, password, api_key):\n \"\"\"\n Tests the access to the CMDB.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n api_key: string\n The CMDB API key.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n \"\"\"\n global api_url\n api_url = 'http://' + server + '/i-doit/src/jsonrpc.php'\n global headers\n headers = {}\n headers['Content-Type'] = 'application/json'\n headers['X-RPC-Auth-Username'] = username\n headers['X-RPC-Auth-Password'] = password\n global apikey\n apikey = api_key\n print(blue + '\\n>>> ' + reset + 'Checking API connection...')\n login_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.login\",\"params\": {\"apikey\": \"' +\n apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n login_request = s.post(api_url, json=login_body, headers=headers)\n login = login_request.json()\n if 'error' in login:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n else:\n print(green + '\\n>>> ' + reset + 'Successfully connected.')\n return True\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\ndef api_category_info(category, db_info, connection):\n \"\"\"\n Executes the method 'cmdb.category_info' of the i-doit API for a given category.\n Gets the attributes associated with a category, its data types and the available values of the dialog type attributes.\n\n Parameters\n ----------\n category : string\n The category name.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the category.\n \"\"\"\n res = {}\n attributes = []\n types = {}\n dialogs = {}\n cat_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.category_info\",\"params\": {\"category\": \"'\n + category + '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n server = db_info.get('server')\n username = db_info.get('username')\n password = db_info.get('password')\n db_name = db_info.get('db_name')\n try:\n s = requests.Session()\n cat_request = s.post(api_url, json=cat_body, headers=headers)\n if cat_request.text != '':\n if 'result' in cat_request.json():\n for attr in cat_request.json()['result']:\n new_atr = {}\n new_atr[cat_request.json()['result'][attr]['title']] = attr\n types[cat_request.json()['result'][attr]['title']\n ] = cat_request.json()['result'][attr]['data']['type']\n dialog = cat_request.json().get('result').get(attr).get(\n 'info').get('type')\n d = {}\n if dialog == 'dialog':\n dialog_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.dialog.read\",\"params\": {\"category\": \"'\n + category + '\", \"property\": \"' + attr +\n '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n s = requests.Session()\n dialog_request = s.post(api_url, json=dialog_body,\n headers=headers)\n if dialog_request.text != '':\n values = dialog_request.json().get('result')\n if values != None:\n if len(values) == 1:\n values = values[0]\n if values != None:\n for a in values:\n if type(a) is dict:\n value = a.get('id')\n name = a.get('title')\n d[value] = name\n elif dialog == 'dialog_plus':\n cursor = connection.cursor()\n table = cat_request.json().get('result').get(attr).get(\n 'data').get('sourceTable')\n values = get_dialogs_from_table(table, db_name, cursor)\n if len(d) > 0:\n dialogs[attr] = d\n attributes.append(new_atr)\n res['attributes'] = attributes\n res['types'] = types\n res['dialogs'] = dialogs\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n attr = {}\n for a in category_info.get('attributes'):\n for key in a:\n attr[key] = a[key]\n attributes[cat]['attributes'] = {k: d for d, k in attr.items()}\n types = category_info.get('types')\n attributes[cat]['types'] = {attr.get(a): types.get(a) for a in types}\n attributes[cat]['dialogs'] = category_info.get('dialogs')\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef process_i_doit():\n \"\"\"\n Processes the i-doit CMDB data model, obtaining information about configuration item types, \n relationship types, configuration items and relationship attributes, restrictions between relationships, \n data types of attributes, and values for dialog type attributes.\n\n Returns\n -------\n dict\n Returns the CMDB information (server address, username, password and api key).\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Make sure that i-doit is running.')\n api_info = api_specification()\n server = api_info.get('server')\n username = api_info.get('username')\n password = api_info.get('password')\n api_key = api_info.get('api_key')\n connection = test_api_connection(server, username, password, api_key)\n if connection == False:\n return process_i_doit()\n else:\n print(blue + '\\n>>> ' + reset + 'Make sure that i-doit is running.\\n')\n db_info = db_specification()\n server = db_info.get('server')\n username = db_info.get('username')\n password = db_info.get('password')\n db_name = db_info.get('db_name')\n connection = test_db_connection(server, db_name, username, password)\n if connection == None:\n return process_i_doit()\n else:\n print(blue + '\\n>>> ' + reset +\n 'Processing i-doit CMDB data model...')\n constants = api_constants()\n if constants == None:\n process_i_doit()\n else:\n ci_types = constants.get('objectTypes')\n cmdb_data_model['ci_types'] = ci_types\n rel_types = constants.get('relationTypes')\n cmdb_data_model['rel_types'] = rel_types\n categories = [c for c in {**constants.get('categories').get\n ('g'), **constants.get('categories').get('s')}]\n cat_attr_types = category_attributes_types(categories,\n db_info, connection)\n ci_attributes_types = {}\n for ci in ci_types:\n attrs = get_object_attributes(ci, cat_attr_types)\n if attrs == None:\n process_i_doit()\n else:\n ci_attributes_types[ci] = attrs\n rel_attributes_types = {}\n attrs = get_object_attributes('C__OBJTYPE__RELATION',\n cat_attr_types)\n if attrs == None:\n process_i_doit()\n else:\n for rel in rel_types:\n rel_attributes_types[rel] = attrs\n cmdb_data_model['ci_attributes'] = {ci: ci_attributes_types\n [ci]['attributes'] for ci in ci_attributes_types}\n cmdb_data_model['ci_attributes_data_types'] = {ci:\n ci_attributes_types[ci]['types'] for ci in\n ci_attributes_types}\n cmdb_data_model['ci_dialog_attributes'] = {ci:\n ci_attributes_types[ci]['dialogs'] for ci in\n ci_attributes_types}\n cmdb_data_model['rel_attributes'] = {rel:\n rel_attributes_types[rel]['attributes'] for rel in\n rel_attributes_types}\n cmdb_data_model['rel_attributes_data_types'] = {rel:\n rel_attributes_types[rel]['types'] for rel in\n rel_attributes_types}\n return api_info\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\ndef test_db_connection(server, db_name, username, passwd):\n \"\"\"\n Tests the access to the CMDB database.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n db_name: string\n The CMDB database name.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Checking i-doit database connection...')\n cnx = None\n try:\n cnx = mysql.connector.connect(user=username, password=passwd, host=\n server, database=db_name)\n print(green + '\\n>>> ' + reset +\n 'Successfully connected to the i-doit database.')\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:\n print(red + '\\n>>> ' + reset +\n 'Something is wrong with your username or password.')\n elif err.errno == errorcode.ER_BAD_DB_ERROR:\n print(red + '\\n>>> ' + reset + 'Database does not exist.')\n else:\n print(red + '\\n>>> ' + reset + str(err))\n return cnx\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\ndef test_api_connection(server, username, password, api_key):\n \"\"\"\n Tests the access to the CMDB.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n api_key: string\n The CMDB API key.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n \"\"\"\n global api_url\n api_url = 'http://' + server + '/i-doit/src/jsonrpc.php'\n global headers\n headers = {}\n headers['Content-Type'] = 'application/json'\n headers['X-RPC-Auth-Username'] = username\n headers['X-RPC-Auth-Password'] = password\n global apikey\n apikey = api_key\n print(blue + '\\n>>> ' + reset + 'Checking API connection...')\n login_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.login\",\"params\": {\"apikey\": \"' +\n apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n login_request = s.post(api_url, json=login_body, headers=headers)\n login = login_request.json()\n if 'error' in login:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n else:\n print(green + '\\n>>> ' + reset + 'Successfully connected.')\n return True\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\ndef api_category_info(category, db_info, connection):\n \"\"\"\n Executes the method 'cmdb.category_info' of the i-doit API for a given category.\n Gets the attributes associated with a category, its data types and the available values of the dialog type attributes.\n\n Parameters\n ----------\n category : string\n The category name.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the category.\n \"\"\"\n res = {}\n attributes = []\n types = {}\n dialogs = {}\n cat_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.category_info\",\"params\": {\"category\": \"'\n + category + '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n server = db_info.get('server')\n username = db_info.get('username')\n password = db_info.get('password')\n db_name = db_info.get('db_name')\n try:\n s = requests.Session()\n cat_request = s.post(api_url, json=cat_body, headers=headers)\n if cat_request.text != '':\n if 'result' in cat_request.json():\n for attr in cat_request.json()['result']:\n new_atr = {}\n new_atr[cat_request.json()['result'][attr]['title']] = attr\n types[cat_request.json()['result'][attr]['title']\n ] = cat_request.json()['result'][attr]['data']['type']\n dialog = cat_request.json().get('result').get(attr).get(\n 'info').get('type')\n d = {}\n if dialog == 'dialog':\n dialog_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.dialog.read\",\"params\": {\"category\": \"'\n + category + '\", \"property\": \"' + attr +\n '\", \"apikey\": \"' + apikey +\n '\",\"language\": \"en\"},\"id\": 1}')\n s = requests.Session()\n dialog_request = s.post(api_url, json=dialog_body,\n headers=headers)\n if dialog_request.text != '':\n values = dialog_request.json().get('result')\n if values != None:\n if len(values) == 1:\n values = values[0]\n if values != None:\n for a in values:\n if type(a) is dict:\n value = a.get('id')\n name = a.get('title')\n d[value] = name\n elif dialog == 'dialog_plus':\n cursor = connection.cursor()\n table = cat_request.json().get('result').get(attr).get(\n 'data').get('sourceTable')\n values = get_dialogs_from_table(table, db_name, cursor)\n if len(d) > 0:\n dialogs[attr] = d\n attributes.append(new_atr)\n res['attributes'] = attributes\n res['types'] = types\n res['dialogs'] = dialogs\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n attr = {}\n for a in category_info.get('attributes'):\n for key in a:\n attr[key] = a[key]\n attributes[cat]['attributes'] = {k: d for d, k in attr.items()}\n types = category_info.get('types')\n attributes[cat]['types'] = {attr.get(a): types.get(a) for a in types}\n attributes[cat]['dialogs'] = category_info.get('dialogs')\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\ndef test_db_connection(server, db_name, username, passwd):\n \"\"\"\n Tests the access to the CMDB database.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n db_name: string\n The CMDB database name.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n\n \"\"\"\n print(blue + '\\n>>> ' + reset + 'Checking i-doit database connection...')\n cnx = None\n try:\n cnx = mysql.connector.connect(user=username, password=passwd, host=\n server, database=db_name)\n print(green + '\\n>>> ' + reset +\n 'Successfully connected to the i-doit database.')\n except mysql.connector.Error as err:\n if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:\n print(red + '\\n>>> ' + reset +\n 'Something is wrong with your username or password.')\n elif err.errno == errorcode.ER_BAD_DB_ERROR:\n print(red + '\\n>>> ' + reset + 'Database does not exist.')\n else:\n print(red + '\\n>>> ' + reset + str(err))\n return cnx\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\ndef test_api_connection(server, username, password, api_key):\n \"\"\"\n Tests the access to the CMDB.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n api_key: string\n The CMDB API key.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n \"\"\"\n global api_url\n api_url = 'http://' + server + '/i-doit/src/jsonrpc.php'\n global headers\n headers = {}\n headers['Content-Type'] = 'application/json'\n headers['X-RPC-Auth-Username'] = username\n headers['X-RPC-Auth-Password'] = password\n global apikey\n apikey = api_key\n print(blue + '\\n>>> ' + reset + 'Checking API connection...')\n login_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.login\",\"params\": {\"apikey\": \"' +\n apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n login_request = s.post(api_url, json=login_body, headers=headers)\n login = login_request.json()\n if 'error' in login:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n else:\n print(green + '\\n>>> ' + reset + 'Successfully connected.')\n return True\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\n<function token>\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n attr = {}\n for a in category_info.get('attributes'):\n for key in a:\n attr[key] = a[key]\n attributes[cat]['attributes'] = {k: d for d, k in attr.items()}\n types = category_info.get('types')\n attributes[cat]['types'] = {attr.get(a): types.get(a) for a in types}\n attributes[cat]['dialogs'] = category_info.get('dialogs')\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\n<function token>\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\ndef test_api_connection(server, username, password, api_key):\n \"\"\"\n Tests the access to the CMDB.\n\n Parameters\n ----------\n server : string\n The IP address of the CMDB server.\n\n username : string\n The CMDB username.\n\n password : string\n The CMDB password.\n\n api_key: string\n The CMDB API key.\n\n Returns\n -------\n boolean\n Returns true if the connection was successful and false otherwise.\n \"\"\"\n global api_url\n api_url = 'http://' + server + '/i-doit/src/jsonrpc.php'\n global headers\n headers = {}\n headers['Content-Type'] = 'application/json'\n headers['X-RPC-Auth-Username'] = username\n headers['X-RPC-Auth-Password'] = password\n global apikey\n apikey = api_key\n print(blue + '\\n>>> ' + reset + 'Checking API connection...')\n login_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.login\",\"params\": {\"apikey\": \"' +\n apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n login_request = s.post(api_url, json=login_body, headers=headers)\n login = login_request.json()\n if 'error' in login:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n else:\n print(green + '\\n>>> ' + reset + 'Successfully connected.')\n return True\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n 'Unable to connect to the API. Please verify the connection information.'\n )\n return False\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\n<function token>\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n attr = {}\n for a in category_info.get('attributes'):\n for key in a:\n attr[key] = a[key]\n attributes[cat]['attributes'] = {k: d for d, k in attr.items()}\n types = category_info.get('types')\n attributes[cat]['types'] = {attr.get(a): types.get(a) for a in types}\n attributes[cat]['dialogs'] = category_info.get('dialogs')\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\n<function token>\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\n<function token>\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\n<function token>\n\n\ndef category_attributes_types(categories, db_info, connection):\n \"\"\"\n Gets the attributes its data types and the available values of the dialog type attributes associated with all the categories in the CMDB.\n\n Parameters\n ----------\n categories : list\n The category names.\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with all the categories.\n \"\"\"\n attributes = {}\n for cat in categories:\n attributes[cat] = {}\n category_info = api_category_info(cat, db_info, connection)\n attr = {}\n for a in category_info.get('attributes'):\n for key in a:\n attr[key] = a[key]\n attributes[cat]['attributes'] = {k: d for d, k in attr.items()}\n types = category_info.get('types')\n attributes[cat]['types'] = {attr.get(a): types.get(a) for a in types}\n attributes[cat]['dialogs'] = category_info.get('dialogs')\n return attributes\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\ndef db_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and database name) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The database information (server address, username, password and database name).\n\n \"\"\"\n db_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your database server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your database name:', 'name': 'db_name',\n 'validate': NotEmpty}, {'type': 'input', 'message':\n 'Enter your database username:', 'name': 'username', 'validate':\n NotEmpty}, {'type': 'password', 'message':\n 'Enter your database password:', 'name': 'password'}]\n db_specification_answer = prompt(db_specification_question, style=style)\n return db_specification_answer\n\n\n<function token>\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\n<function token>\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\n<function token>\n<function token>\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\n<function token>\n<function token>\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\n<function token>\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\ndef get_dialogs_from_table(table, db, cursor):\n values = {}\n if table != None:\n name = str(table) + '__id'\n desc = str(table) + '__title'\n query = ('SELECT ' + name + ', ' + desc + ' FROM ' + db + '.' +\n table + ';')\n cursor.execute(query)\n for t in cursor:\n name, value = t\n values[name] = value\n return values\n\n\n<function token>\n<function token>\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\n<function token>\n<function token>\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\n<function token>\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef get_object_attributes(ci, cat_attr_types):\n \"\"\"\n Executes the method 'cmdb.object_type_categories.read' of the i-doit API for a given object type.\n Gets the categories associated with an object type.\n Computes the attributes, its data types and the available values of the dialog type attributes of the object type, based on the categories associated with that type.\n\n Parameters\n ----------\n ci : string\n The object name.\n\n cat_attr_types : dict\n The attributes, its data types and the available values of the dialog type attributes, associated with every category, .\n\n Returns\n -------\n dict\n Returns the attributes, its data types and the available values of the dialog type attributes associated with the object type.\n \"\"\"\n res = {}\n object_attributes = {}\n attributes_types = {}\n dialogs = {}\n obj_categories_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"cmdb.object_type_categories.read\",\"params\": {\"type\": \"'\n + ci + '\", \"apikey\": \"' + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n obj_categories_request = s.post(api_url, json=obj_categories_body,\n headers=headers)\n if obj_categories_request.text != '':\n if 'result' in obj_categories_request.json():\n if 'catg' in obj_categories_request.json()['result']:\n for cat_g in obj_categories_request.json()['result']['catg'\n ]:\n cat = cat_g['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n if 'cats' in obj_categories_request.json()['result']:\n for cat_s in obj_categories_request.json()['result']['cats'\n ]:\n cat = cat_s['const']\n if cat in cat_attr_types:\n dialogs.update(cat_attr_types.get(cat).get(\n 'dialogs'))\n attrs = cat_attr_types.get(cat).get('attributes')\n types = cat_attr_types.get(cat).get('types')\n object_attributes.update(attrs)\n attributes_types.update(types)\n res['dialogs'] = dialogs\n res['attributes'] = object_attributes\n res['types'] = attributes_types\n return res\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\n<function token>\n<function token>\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\n<function token>\n\n\ndef api_constants():\n \"\"\"\n Executes the method 'idoit.contants' of the i-doit API.\n Gets the configuration item types, relationship types, and categories present in the CMDB.\n\n Returns\n -------\n boolean\n Returns the result of the execution of the method.\n \"\"\"\n constants_body = json.loads(\n '{\"version\": \"2.0\",\"method\": \"idoit.constants\",\"params\": {\"apikey\": \"'\n + apikey + '\",\"language\": \"en\"},\"id\": 1}')\n try:\n s = requests.Session()\n constants_request = s.post(api_url, json=constants_body, headers=\n headers)\n constants = constants_request.json()\n return constants.get('result')\n except requests.exceptions.RequestException:\n print(red + '\\n>>> ' + reset +\n \"\"\"Unable to connect to the API. Please verify the connection information.\n\"\"\"\n )\n return None\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\n<function token>\n<function token>\n\n\ndef api_specification():\n \"\"\"\n Asks the user to enter the necessary information (server address, username, password and api key) to access the i-doit CMDB.\n\n Returns\n -------\n dict\n The CMDB information (server address, username, password and api key).\n \"\"\"\n api_specification_question = [{'type': 'input', 'message':\n \"Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where 'y' is optional):\"\n , 'name': 'server', 'validate': AddressValidator}, {'type': 'input',\n 'message': 'Enter your CMDB username:', 'name': 'username',\n 'validate': NotEmpty}, {'type': 'password', 'message':\n 'Enter your CMDB password:', 'name': 'password'}, {'type': 'input',\n 'message': 'Enter your API key:', 'name': 'api_key', 'validate':\n NotEmpty}]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n\n def validate(self, document):\n ok = document.text != '' and document.text != None\n if not ok:\n raise ValidationError(message='Please enter something',\n cursor_position=len(document.text))\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n\n\nclass NotEmpty(Validator):\n <function token>\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<class token>\n\n\nclass AddressValidator(Validator):\n\n def validate(self, document):\n ok = regex.match('(\\\\d{1,3}\\\\.){3}\\\\d{1,3}', document.text)\n if not ok:\n raise ValidationError(message=\n 'Please enter a valid IP address.', cursor_position=len(\n document.text))\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<class token>\n\n\nclass AddressValidator(Validator):\n <function token>\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<docstring token>\n<assignment token>\n<class token>\n<class token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
99,032 |
f0dcb759f2b16cbd28780ac838342755a80d8deb
|
from django.test import RequestFactory
from django.urls import reverse
from . models import User
class TestViews:
def login_detail(self):
path=reverse()
class RegisterModel:
def test_save(self):
register = RestRegistration.objects.create(
email="[email protected]",
first_name="shahazad",
last_name="shaikh",
password=500,
confirm_password=500,
)
assert register.email == "[email protected]"
assert register.password == 500
assert register.confirm_password == 500
assert register.email == "[email protected]"
def setUp(self):
valid_payload = {
'title': 'test',
'description': "test",
'color': "test",
'label': 'test'}
response = client.post(
reverse('createnote'),
data=json.dumps(valid_payload),
content_type='application/json'
)
assert (response.status_code)
def update(self):
valid_payload = {
'title': 'test',
'description': "test",
'color': "test",
'label': 'test'}
response = client.post(
reverse('updatenote'),
data=json.dumps(valid_payload),
content_type='application/json'
)
assert (response.status_code)
|
[
"from django.test import RequestFactory\nfrom django.urls import reverse\nfrom . models import User\n\nclass TestViews:\n def login_detail(self):\n path=reverse()\n\n\nclass RegisterModel:\n def test_save(self):\n register = RestRegistration.objects.create(\n email=\"[email protected]\",\n first_name=\"shahazad\",\n last_name=\"shaikh\",\n password=500,\n confirm_password=500,\n )\n assert register.email == \"[email protected]\"\n assert register.password == 500\n assert register.confirm_password == 500\n assert register.email == \"[email protected]\"\n\ndef setUp(self):\n valid_payload = {\n 'title': 'test',\n 'description': \"test\",\n 'color': \"test\",\n 'label': 'test'}\n\n response = client.post(\n reverse('createnote'),\n data=json.dumps(valid_payload),\n content_type='application/json'\n )\n\n assert (response.status_code)\n\n\ndef update(self):\n valid_payload = {\n 'title': 'test',\n 'description': \"test\",\n 'color': \"test\",\n 'label': 'test'}\n\n response = client.post(\n reverse('updatenote'),\n data=json.dumps(valid_payload),\n content_type='application/json'\n )\n assert (response.status_code)\n\n",
"from django.test import RequestFactory\nfrom django.urls import reverse\nfrom .models import User\n\n\nclass TestViews:\n\n def login_detail(self):\n path = reverse()\n\n\nclass RegisterModel:\n\n def test_save(self):\n register = RestRegistration.objects.create(email=\n '[email protected]', first_name='shahazad', last_name=\n 'shaikh', password=500, confirm_password=500)\n assert register.email == '[email protected]'\n assert register.password == 500\n assert register.confirm_password == 500\n assert register.email == '[email protected]'\n\n\ndef setUp(self):\n valid_payload = {'title': 'test', 'description': 'test', 'color':\n 'test', 'label': 'test'}\n response = client.post(reverse('createnote'), data=json.dumps(\n valid_payload), content_type='application/json')\n assert response.status_code\n\n\ndef update(self):\n valid_payload = {'title': 'test', 'description': 'test', 'color':\n 'test', 'label': 'test'}\n response = client.post(reverse('updatenote'), data=json.dumps(\n valid_payload), content_type='application/json')\n assert response.status_code\n",
"<import token>\n\n\nclass TestViews:\n\n def login_detail(self):\n path = reverse()\n\n\nclass RegisterModel:\n\n def test_save(self):\n register = RestRegistration.objects.create(email=\n '[email protected]', first_name='shahazad', last_name=\n 'shaikh', password=500, confirm_password=500)\n assert register.email == '[email protected]'\n assert register.password == 500\n assert register.confirm_password == 500\n assert register.email == '[email protected]'\n\n\ndef setUp(self):\n valid_payload = {'title': 'test', 'description': 'test', 'color':\n 'test', 'label': 'test'}\n response = client.post(reverse('createnote'), data=json.dumps(\n valid_payload), content_type='application/json')\n assert response.status_code\n\n\ndef update(self):\n valid_payload = {'title': 'test', 'description': 'test', 'color':\n 'test', 'label': 'test'}\n response = client.post(reverse('updatenote'), data=json.dumps(\n valid_payload), content_type='application/json')\n assert response.status_code\n",
"<import token>\n\n\nclass TestViews:\n\n def login_detail(self):\n path = reverse()\n\n\nclass RegisterModel:\n\n def test_save(self):\n register = RestRegistration.objects.create(email=\n '[email protected]', first_name='shahazad', last_name=\n 'shaikh', password=500, confirm_password=500)\n assert register.email == '[email protected]'\n assert register.password == 500\n assert register.confirm_password == 500\n assert register.email == '[email protected]'\n\n\ndef setUp(self):\n valid_payload = {'title': 'test', 'description': 'test', 'color':\n 'test', 'label': 'test'}\n response = client.post(reverse('createnote'), data=json.dumps(\n valid_payload), content_type='application/json')\n assert response.status_code\n\n\n<function token>\n",
"<import token>\n\n\nclass TestViews:\n\n def login_detail(self):\n path = reverse()\n\n\nclass RegisterModel:\n\n def test_save(self):\n register = RestRegistration.objects.create(email=\n '[email protected]', first_name='shahazad', last_name=\n 'shaikh', password=500, confirm_password=500)\n assert register.email == '[email protected]'\n assert register.password == 500\n assert register.confirm_password == 500\n assert register.email == '[email protected]'\n\n\n<function token>\n<function token>\n",
"<import token>\n\n\nclass TestViews:\n <function token>\n\n\nclass RegisterModel:\n\n def test_save(self):\n register = RestRegistration.objects.create(email=\n '[email protected]', first_name='shahazad', last_name=\n 'shaikh', password=500, confirm_password=500)\n assert register.email == '[email protected]'\n assert register.password == 500\n assert register.confirm_password == 500\n assert register.email == '[email protected]'\n\n\n<function token>\n<function token>\n",
"<import token>\n<class token>\n\n\nclass RegisterModel:\n\n def test_save(self):\n register = RestRegistration.objects.create(email=\n '[email protected]', first_name='shahazad', last_name=\n 'shaikh', password=500, confirm_password=500)\n assert register.email == '[email protected]'\n assert register.password == 500\n assert register.confirm_password == 500\n assert register.email == '[email protected]'\n\n\n<function token>\n<function token>\n",
"<import token>\n<class token>\n\n\nclass RegisterModel:\n <function token>\n assert register.email == '[email protected]'\n\n\n<function token>\n<function token>\n",
"<import token>\n<class token>\n<class token>\n<function token>\n<function token>\n"
] | false |
99,033 |
30abbec6be4dae5cc39435c49a9d896700be8591
|
from collections import deque
n, k, q = (int(x) for x in input().split())
result = list()
arr = deque([int(x) for x in input().split()])
arr.rotate(k)
for _ in range(q):
result.append(arr[int(input())])
[print(r) for r in result]
|
[
"from collections import deque\n\nn, k, q = (int(x) for x in input().split())\nresult = list()\narr = deque([int(x) for x in input().split()])\narr.rotate(k)\nfor _ in range(q):\n result.append(arr[int(input())])\n\n[print(r) for r in result]",
"from collections import deque\nn, k, q = (int(x) for x in input().split())\nresult = list()\narr = deque([int(x) for x in input().split()])\narr.rotate(k)\nfor _ in range(q):\n result.append(arr[int(input())])\n[print(r) for r in result]\n",
"<import token>\nn, k, q = (int(x) for x in input().split())\nresult = list()\narr = deque([int(x) for x in input().split()])\narr.rotate(k)\nfor _ in range(q):\n result.append(arr[int(input())])\n[print(r) for r in result]\n",
"<import token>\n<assignment token>\narr.rotate(k)\nfor _ in range(q):\n result.append(arr[int(input())])\n[print(r) for r in result]\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
99,034 |
0263e283792b480cc600f7afbb3a888921fff7f5
|
"""
In situations where we are developing an application or library
that will be use to create long computation reports or results,
we want to execute the long process only when all the project tests
are passed.
pytest provide a great support for creating test suits,
parallel execution, reports, command line, IDE of CI integration,
and so forth, so the idea is to write these long computation code
in test from, group them in studios and extend pytest with a plugin
that allow us to:
- Ignore these long computation studies and run only the regular ones.
- Sort all the involved tests so the study will be executed only when
all dependences are passed.
- Define the studies and dependences in a easy way.
- Don't interfere with normal pytest use.
For a more detailed refecences, please read README.md or
visit https://github.com/asteriogonzalez/pytest-study
"""
from __future__ import print_function
try:
import wingdbstub
except ImportError:
pass
import re
import pytest
from blessings import Terminal
term = Terminal()
MARKS = ['study', 'pre'] # match 1st ocurrence
def parse_args(args, kwargs):
"update kwargs with positional arguments"
positional = ['name', 'order']
kw = {'name': 'default', 'order': 1000}
kw.update(kwargs)
for key in kwargs:
if key in positional:
positional.remove(key)
for i, val in enumerate(args):
kw[positional[i]] = val
return kw
def get_study_name(item):
"Try to get the name where the test belongs to, or '' when is free"
for mark in MARKS:
marker = item.get_marker(mark)
if marker:
return parse_args(marker.args, marker.kwargs)['name']
return ''
def get_FQN(item):
"Get the Full Qualified Name of a test item"
names = []
for x in item.listchain():
if not isinstance(x, (pytest.Session, pytest.Instance)):
names.append(x.name)
return ':'.join(names)
# ------------------------------------------
# Skip studio tests
# ------------------------------------------
def pytest_addoption(parser):
"Add the --runstudy option in command line"
# parser.addoption("--runstudy", action="store_true",
# default=False, help="run studio processes")
parser.addoption("--show_order", action="store_true",
default=False,
help="""show tests and studies order execution
and which are selected for execution.""")
parser.addoption("--runstudy", action="store", type="string",
default='', metavar='all|reg expression',
help="""regular expression for the studies names
('all' runs all).
None is selected by default.""")
def pytest_collection_modifyitems(config, items):
"""Remove all study tests if --runstudy is not selected
and reorder the study dependences to be executed incrementaly
so any failed study test will abort the complete sequence.
- Mark a test with @pytest.mark.study to consider part of a study.
- Mark a test with @pytest.mark.study and named 'test_study_xxxx()'
to be executed at the end when all previous test study functions
are passed.
"""
# check if studio tests myst be skipped
run_study = config.getoption("--runstudy")
# 'all' will match all studies, '' will not match anything
run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)
# --runstudy given in cli: do not skip study tests and
test_selected = list()
test_skipped = list()
groups = dict()
incremental = pytest.mark.incremental()
def add():
"helper for gathering test info"
marker = item.get_marker(mark)
kwargs = parse_args(marker.args, marker.kwargs)
group_name = kwargs['name']
group = groups.setdefault(group_name, dict())
group.setdefault(mark, list()).append((kwargs, item))
item.add_marker(incremental)
# place every test in regular, prerequisite and studies
# group by name
for item in items:
for mark in set(item.keywords.keys()).intersection(MARKS):
add()
break
else:
test_selected.append(item)
def sort(a, b):
"Sort two items by order priority"
return cmp(a[0]['order'], b[0]['order'])
# use studies precedence to built the global sequence order
mandatory = 'study' # mandatory mark for global sorting: study
studies = list()
for name, info in groups.items():
studies.extend(info.get(mandatory, []))
studies.sort(sort)
def append(tests, where):
"helper to add the test item from info structure"
for test in tests:
test = test[1]
if test not in where:
where.append(test)
# select only the test that are going to be launched
width = 0
regexp = re.compile(run_study, re.I | re.DOTALL)
for study in studies:
group_name = study[0]['name']
width = max(width, len(group_name))
where = test_selected if regexp.search(group_name) else test_skipped
for mark, seq in groups[group_name].items():
if mark == mandatory:
continue
seq.sort(sort)
append(seq, where)
append([study], where)
if config.getoption("--show_order") or config.getoption("--debug"):
fmt = "{0:>3d} [{1:>%s}] {2}" % width
for i, item in enumerate(test_selected + test_skipped):
study = get_study_name(item)
fqn = get_FQN(item)
line = fmt.format(i, study, fqn)
if item in test_selected:
line = term.green('+' + line)
else:
line = term.yellow('-' + line)
print(line)
# we make the --runstudy check at the end to be able to show
# test order with --show_order or --debig options
# reorder tests by group name and replace items IN-PLACE
if run_study:
items[:] = test_selected
return
skip_test = pytest.mark.skip(reason="need --runstudy option to run")
for item in items:
if set(item.keywords.keys()).intersection(MARKS):
item.add_marker(skip_test)
# ------------------------------------------
# incremental failure chain (from pytest doc)
# ------------------------------------------
def pytest_runtest_makereport(item, call):
"set the last failed test"
if "incremental" in item.keywords:
if call.excinfo is not None:
parent = item.parent
parent._previousfailed = item
def pytest_runtest_setup(item):
"Abort the execution stage if a previous incremental test has failed"
if "incremental" in item.keywords:
previousfailed = getattr(item.parent, "_previousfailed", None)
if previousfailed is not None:
pytest.xfail("previous test failed (%s)" % previousfailed.name)
|
[
"\"\"\"\nIn situations where we are developing an application or library\nthat will be use to create long computation reports or results,\nwe want to execute the long process only when all the project tests\nare passed.\n\npytest provide a great support for creating test suits,\nparallel execution, reports, command line, IDE of CI integration,\nand so forth, so the idea is to write these long computation code\nin test from, group them in studios and extend pytest with a plugin\nthat allow us to:\n\n- Ignore these long computation studies and run only the regular ones.\n- Sort all the involved tests so the study will be executed only when\n all dependences are passed.\n- Define the studies and dependences in a easy way.\n- Don't interfere with normal pytest use.\n\nFor a more detailed refecences, please read README.md or\nvisit https://github.com/asteriogonzalez/pytest-study\n\"\"\"\nfrom __future__ import print_function\ntry:\n import wingdbstub\nexcept ImportError:\n pass\n\nimport re\nimport pytest\nfrom blessings import Terminal\n\nterm = Terminal()\n\nMARKS = ['study', 'pre'] # match 1st ocurrence\n\n\ndef parse_args(args, kwargs):\n \"update kwargs with positional arguments\"\n positional = ['name', 'order']\n kw = {'name': 'default', 'order': 1000}\n kw.update(kwargs)\n for key in kwargs:\n if key in positional:\n positional.remove(key)\n for i, val in enumerate(args):\n kw[positional[i]] = val\n\n return kw\n\n\ndef get_study_name(item):\n \"Try to get the name where the test belongs to, or '' when is free\"\n for mark in MARKS:\n marker = item.get_marker(mark)\n if marker:\n return parse_args(marker.args, marker.kwargs)['name']\n return ''\n\n\ndef get_FQN(item):\n \"Get the Full Qualified Name of a test item\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n\n return ':'.join(names)\n\n# ------------------------------------------\n# Skip studio tests\n# ------------------------------------------\n\n\ndef pytest_addoption(parser):\n \"Add the --runstudy option in command line\"\n # parser.addoption(\"--runstudy\", action=\"store_true\",\n # default=False, help=\"run studio processes\")\n\n parser.addoption(\"--show_order\", action=\"store_true\",\n default=False,\n help=\"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\")\n\n parser.addoption(\"--runstudy\", action=\"store\", type=\"string\",\n default='', metavar='all|reg expression',\n help=\"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\")\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n # check if studio tests myst be skipped\n run_study = config.getoption(\"--runstudy\")\n # 'all' will match all studies, '' will not match anything\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n # --runstudy given in cli: do not skip study tests and\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"helper for gathering test info\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n\n # place every test in regular, prerequisite and studies\n # group by name\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"Sort two items by order priority\"\n return cmp(a[0]['order'], b[0]['order'])\n\n # use studies precedence to built the global sequence order\n mandatory = 'study' # mandatory mark for global sorting: study\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"helper to add the test item from info structure\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n\n # select only the test that are going to be launched\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n\n if config.getoption(\"--show_order\") or config.getoption(\"--debug\"):\n fmt = \"{0:>3d} [{1:>%s}] {2}\" % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n\n # we make the --runstudy check at the end to be able to show\n # test order with --show_order or --debig options\n # reorder tests by group name and replace items IN-PLACE\n if run_study:\n items[:] = test_selected\n return\n\n skip_test = pytest.mark.skip(reason=\"need --runstudy option to run\")\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n# ------------------------------------------\n# incremental failure chain (from pytest doc)\n# ------------------------------------------\n\n\ndef pytest_runtest_makereport(item, call):\n \"set the last failed test\"\n if \"incremental\" in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\ndef pytest_runtest_setup(item):\n \"Abort the execution stage if a previous incremental test has failed\"\n if \"incremental\" in item.keywords:\n previousfailed = getattr(item.parent, \"_previousfailed\", None)\n if previousfailed is not None:\n pytest.xfail(\"previous test failed (%s)\" % previousfailed.name)\n",
"<docstring token>\nfrom __future__ import print_function\ntry:\n import wingdbstub\nexcept ImportError:\n pass\nimport re\nimport pytest\nfrom blessings import Terminal\nterm = Terminal()\nMARKS = ['study', 'pre']\n\n\ndef parse_args(args, kwargs):\n \"\"\"update kwargs with positional arguments\"\"\"\n positional = ['name', 'order']\n kw = {'name': 'default', 'order': 1000}\n kw.update(kwargs)\n for key in kwargs:\n if key in positional:\n positional.remove(key)\n for i, val in enumerate(args):\n kw[positional[i]] = val\n return kw\n\n\ndef get_study_name(item):\n \"\"\"Try to get the name where the test belongs to, or '' when is free\"\"\"\n for mark in MARKS:\n marker = item.get_marker(mark)\n if marker:\n return parse_args(marker.args, marker.kwargs)['name']\n return ''\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\ndef pytest_runtest_makereport(item, call):\n \"\"\"set the last failed test\"\"\"\n if 'incremental' in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\ndef pytest_runtest_setup(item):\n \"\"\"Abort the execution stage if a previous incremental test has failed\"\"\"\n if 'incremental' in item.keywords:\n previousfailed = getattr(item.parent, '_previousfailed', None)\n if previousfailed is not None:\n pytest.xfail('previous test failed (%s)' % previousfailed.name)\n",
"<docstring token>\n<import token>\ntry:\n import wingdbstub\nexcept ImportError:\n pass\n<import token>\nterm = Terminal()\nMARKS = ['study', 'pre']\n\n\ndef parse_args(args, kwargs):\n \"\"\"update kwargs with positional arguments\"\"\"\n positional = ['name', 'order']\n kw = {'name': 'default', 'order': 1000}\n kw.update(kwargs)\n for key in kwargs:\n if key in positional:\n positional.remove(key)\n for i, val in enumerate(args):\n kw[positional[i]] = val\n return kw\n\n\ndef get_study_name(item):\n \"\"\"Try to get the name where the test belongs to, or '' when is free\"\"\"\n for mark in MARKS:\n marker = item.get_marker(mark)\n if marker:\n return parse_args(marker.args, marker.kwargs)['name']\n return ''\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\ndef pytest_runtest_makereport(item, call):\n \"\"\"set the last failed test\"\"\"\n if 'incremental' in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\ndef pytest_runtest_setup(item):\n \"\"\"Abort the execution stage if a previous incremental test has failed\"\"\"\n if 'incremental' in item.keywords:\n previousfailed = getattr(item.parent, '_previousfailed', None)\n if previousfailed is not None:\n pytest.xfail('previous test failed (%s)' % previousfailed.name)\n",
"<docstring token>\n<import token>\ntry:\n import wingdbstub\nexcept ImportError:\n pass\n<import token>\n<assignment token>\n\n\ndef parse_args(args, kwargs):\n \"\"\"update kwargs with positional arguments\"\"\"\n positional = ['name', 'order']\n kw = {'name': 'default', 'order': 1000}\n kw.update(kwargs)\n for key in kwargs:\n if key in positional:\n positional.remove(key)\n for i, val in enumerate(args):\n kw[positional[i]] = val\n return kw\n\n\ndef get_study_name(item):\n \"\"\"Try to get the name where the test belongs to, or '' when is free\"\"\"\n for mark in MARKS:\n marker = item.get_marker(mark)\n if marker:\n return parse_args(marker.args, marker.kwargs)['name']\n return ''\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\ndef pytest_runtest_makereport(item, call):\n \"\"\"set the last failed test\"\"\"\n if 'incremental' in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\ndef pytest_runtest_setup(item):\n \"\"\"Abort the execution stage if a previous incremental test has failed\"\"\"\n if 'incremental' in item.keywords:\n previousfailed = getattr(item.parent, '_previousfailed', None)\n if previousfailed is not None:\n pytest.xfail('previous test failed (%s)' % previousfailed.name)\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n\n\ndef parse_args(args, kwargs):\n \"\"\"update kwargs with positional arguments\"\"\"\n positional = ['name', 'order']\n kw = {'name': 'default', 'order': 1000}\n kw.update(kwargs)\n for key in kwargs:\n if key in positional:\n positional.remove(key)\n for i, val in enumerate(args):\n kw[positional[i]] = val\n return kw\n\n\ndef get_study_name(item):\n \"\"\"Try to get the name where the test belongs to, or '' when is free\"\"\"\n for mark in MARKS:\n marker = item.get_marker(mark)\n if marker:\n return parse_args(marker.args, marker.kwargs)['name']\n return ''\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\ndef pytest_runtest_makereport(item, call):\n \"\"\"set the last failed test\"\"\"\n if 'incremental' in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\ndef pytest_runtest_setup(item):\n \"\"\"Abort the execution stage if a previous incremental test has failed\"\"\"\n if 'incremental' in item.keywords:\n previousfailed = getattr(item.parent, '_previousfailed', None)\n if previousfailed is not None:\n pytest.xfail('previous test failed (%s)' % previousfailed.name)\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n\n\ndef parse_args(args, kwargs):\n \"\"\"update kwargs with positional arguments\"\"\"\n positional = ['name', 'order']\n kw = {'name': 'default', 'order': 1000}\n kw.update(kwargs)\n for key in kwargs:\n if key in positional:\n positional.remove(key)\n for i, val in enumerate(args):\n kw[positional[i]] = val\n return kw\n\n\ndef get_study_name(item):\n \"\"\"Try to get the name where the test belongs to, or '' when is free\"\"\"\n for mark in MARKS:\n marker = item.get_marker(mark)\n if marker:\n return parse_args(marker.args, marker.kwargs)['name']\n return ''\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\ndef pytest_runtest_makereport(item, call):\n \"\"\"set the last failed test\"\"\"\n if 'incremental' in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\n<function token>\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n\n\ndef parse_args(args, kwargs):\n \"\"\"update kwargs with positional arguments\"\"\"\n positional = ['name', 'order']\n kw = {'name': 'default', 'order': 1000}\n kw.update(kwargs)\n for key in kwargs:\n if key in positional:\n positional.remove(key)\n for i, val in enumerate(args):\n kw[positional[i]] = val\n return kw\n\n\n<function token>\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\ndef pytest_runtest_makereport(item, call):\n \"\"\"set the last failed test\"\"\"\n if 'incremental' in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\n<function token>\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\ndef pytest_runtest_makereport(item, call):\n \"\"\"set the last failed test\"\"\"\n if 'incremental' in item.keywords:\n if call.excinfo is not None:\n parent = item.parent\n parent._previousfailed = item\n\n\n<function token>\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef get_FQN(item):\n \"\"\"Get the Full Qualified Name of a test item\"\"\"\n names = []\n for x in item.listchain():\n if not isinstance(x, (pytest.Session, pytest.Instance)):\n names.append(x.name)\n return ':'.join(names)\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n\n\ndef pytest_addoption(parser):\n \"\"\"Add the --runstudy option in command line\"\"\"\n parser.addoption('--show_order', action='store_true', default=False,\n help=\n \"\"\"show tests and studies order execution\n and which are selected for execution.\"\"\"\n )\n parser.addoption('--runstudy', action='store', type='string', default=\n '', metavar='all|reg expression', help=\n \"\"\"regular expression for the studies names\n ('all' runs all).\n None is selected by default.\"\"\"\n )\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Remove all study tests if --runstudy is not selected\n and reorder the study dependences to be executed incrementaly\n so any failed study test will abort the complete sequence.\n\n - Mark a test with @pytest.mark.study to consider part of a study.\n - Mark a test with @pytest.mark.study and named 'test_study_xxxx()'\n to be executed at the end when all previous test study functions\n are passed.\n \"\"\"\n run_study = config.getoption('--runstudy')\n run_study = {'': '(?!x)x', 'all': '.*'}.get(run_study, run_study)\n test_selected = list()\n test_skipped = list()\n groups = dict()\n incremental = pytest.mark.incremental()\n\n def add():\n \"\"\"helper for gathering test info\"\"\"\n marker = item.get_marker(mark)\n kwargs = parse_args(marker.args, marker.kwargs)\n group_name = kwargs['name']\n group = groups.setdefault(group_name, dict())\n group.setdefault(mark, list()).append((kwargs, item))\n item.add_marker(incremental)\n for item in items:\n for mark in set(item.keywords.keys()).intersection(MARKS):\n add()\n break\n else:\n test_selected.append(item)\n\n def sort(a, b):\n \"\"\"Sort two items by order priority\"\"\"\n return cmp(a[0]['order'], b[0]['order'])\n mandatory = 'study'\n studies = list()\n for name, info in groups.items():\n studies.extend(info.get(mandatory, []))\n studies.sort(sort)\n\n def append(tests, where):\n \"\"\"helper to add the test item from info structure\"\"\"\n for test in tests:\n test = test[1]\n if test not in where:\n where.append(test)\n width = 0\n regexp = re.compile(run_study, re.I | re.DOTALL)\n for study in studies:\n group_name = study[0]['name']\n width = max(width, len(group_name))\n where = test_selected if regexp.search(group_name) else test_skipped\n for mark, seq in groups[group_name].items():\n if mark == mandatory:\n continue\n seq.sort(sort)\n append(seq, where)\n append([study], where)\n if config.getoption('--show_order') or config.getoption('--debug'):\n fmt = '{0:>3d} [{1:>%s}] {2}' % width\n for i, item in enumerate(test_selected + test_skipped):\n study = get_study_name(item)\n fqn = get_FQN(item)\n line = fmt.format(i, study, fqn)\n if item in test_selected:\n line = term.green('+' + line)\n else:\n line = term.yellow('-' + line)\n print(line)\n if run_study:\n items[:] = test_selected\n return\n skip_test = pytest.mark.skip(reason='need --runstudy option to run')\n for item in items:\n if set(item.keywords.keys()).intersection(MARKS):\n item.add_marker(skip_test)\n\n\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<code token>\n<import token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
99,035 |
ac79e0d07770b23690008e642d56464adffd3c7d
|
hit = 0
location = 0
right = 1
skip = False
with open("day3.txt") as f:
for line in f.readlines():
if skip:
skip = False
continue
trees = line.strip()
print(location, trees)
if trees[location] == '#':
hit += 1
location += right
location = location % len(trees)
skip = True
print(hit)
|
[
"hit = 0\n\nlocation = 0\nright = 1\nskip = False\n\nwith open(\"day3.txt\") as f:\n for line in f.readlines():\n if skip:\n skip = False\n continue\n\n trees = line.strip()\n print(location, trees)\n if trees[location] == '#':\n hit += 1\n location += right\n location = location % len(trees)\n skip = True\n\nprint(hit)\n",
"hit = 0\nlocation = 0\nright = 1\nskip = False\nwith open('day3.txt') as f:\n for line in f.readlines():\n if skip:\n skip = False\n continue\n trees = line.strip()\n print(location, trees)\n if trees[location] == '#':\n hit += 1\n location += right\n location = location % len(trees)\n skip = True\nprint(hit)\n",
"<assignment token>\nwith open('day3.txt') as f:\n for line in f.readlines():\n if skip:\n skip = False\n continue\n trees = line.strip()\n print(location, trees)\n if trees[location] == '#':\n hit += 1\n location += right\n location = location % len(trees)\n skip = True\nprint(hit)\n",
"<assignment token>\n<code token>\n"
] | false |
99,036 |
bd366bc533f5a291d2264bedb0108f08fcd67914
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
__author__ = 'LiBin'
__mtime__ = '16/6/13'
┏┓ ┏┓
┏┛┻━━━┛┻┓
┃ ☃ ┃
┃ ┳┛ ┗┳ ┃
┃ ┻ ┃
┗━┓ ┏━┛
┃ ┗━━━┓
┃ 神兽保佑 ┣┓
┃ 永无BUG! ┏┛
┗┓┓┏━┳┓┏┛
┃┫┫ ┃┫┫
┗┻┛ ┗┻┛
"""
import json
import requests
import time
import hashlib
import random
import pymysql
__version__ = '0.1'
class PublicLibrary(object):
def __int__(self):
pass
def getCoding(self, strInput):
u"""
获取编码格式
"""
if isinstance(strInput, unicode):
return "unicode"
try:
strInput.decode("utf8")
return 'utf8'
except:
pass
try:
strInput.decode("gbk")
return 'gbk'
except:
pass
def tran2UTF8(self, strInput):
"""
转化为utf8格式
"""
strCodingFmt = self.getCoding(strInput)
if strCodingFmt == "utf8":
return strInput
elif strCodingFmt == "unicode":
return strInput.encode("utf8")
elif strCodingFmt == "gbk":
return strInput.decode("gbk").encode("utf8")
def tran2GBK(self, strInput):
"""
转化为gbk格式
"""
strCodingFmt = self.getCoding(strInput)
if strCodingFmt == "gbk":
return strInput
elif strCodingFmt == "unicode":
return strInput.encode("gbk")
elif strCodingFmt == "utf8":
return strInput.decode("utf8").encode("gbk")
def md5(self, init_str):
"""
md5加密
"""
m = hashlib.md5()
m.update(init_str)
return m.hexdigest()
def eval_dict(self, strInput):
u"""接收字符串直接转成需要类型,例
| eval dict | str |
"""
strInput = eval(strInput)
return strInput
def random_num(self, num):
"""
随机出给出数字位数的数字
"""
number = ''
for i in random.sample(range(10), int(num)):
number += ''.join(str(i))
return number
def req(
self,
login_msg,
url,
method,
data=None,
headers=None):
u"""专用,有登录状态,例
| run interface test tenant | login_msg,url,method,data,headers
"""
session = requests.Session()
url = self.tran2UTF8(url)
method = self.tran2UTF8(method)
if login_msg:
login_msg = self.eval_dict(login_msg)
md5_pwd = self.md5(login_msg['passwd'])
login_msg['passwd'] = md5_pwd
if data:
data = self.eval_dict(data)
if headers:
headers = self.eval_dict(headers)
else:
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
results = 'connection error'
# 先登录
r = session.post('https://xxxxxx.cn/login',
data=json.dumps(login_msg), headers=headers)
print ("*******************************")
print (u"登录状态信息")
print (r.status_code)
print (r.content)
print ("*******************************")
try:
if method == "post":
if isinstance(data, dict):
data = json.dumps(data)
results = session.post(
url, data=data, headers=headers, verify=False)
elif method == "get":
results = session.get(
url, params=data, headers=headers, verify=False)
elif method == 'delete':
results = session.delete(url, headers=headers, verify=False)
return results
except requests.ConnectionError as e:
return e
def con_db(self, sql):
db = pymysql.connect(
host="1.1.5.2",
user="xxx",
passwd="xxx",
db="xxx",
charset='utf8')
cursor = db.cursor()
cursor.execute(sql)
data = cursor.fetchone()
db.close()
return data
|
[
"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\n__author__ = 'LiBin'\n__mtime__ = '16/6/13'\n ┏┓ ┏┓\n ┏┛┻━━━┛┻┓\n ┃ ☃ ┃\n ┃ ┳┛ ┗┳ ┃\n ┃ ┻ ┃\n ┗━┓ ┏━┛\n ┃ ┗━━━┓\n ┃ 神兽保佑 ┣┓\n ┃ 永无BUG! ┏┛\n ┗┓┓┏━┳┓┏┛\n ┃┫┫ ┃┫┫\n ┗┻┛ ┗┻┛\n\"\"\"\n\nimport json\nimport requests\nimport time\nimport hashlib\nimport random\nimport pymysql\n\n__version__ = '0.1'\n\n\nclass PublicLibrary(object):\n\n def __int__(self):\n pass\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return \"unicode\"\n try:\n strInput.decode(\"utf8\")\n return 'utf8'\n except:\n pass\n try:\n strInput.decode(\"gbk\")\n return 'gbk'\n except:\n pass\n\n def tran2UTF8(self, strInput):\n \"\"\"\n 转化为utf8格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == \"utf8\":\n return strInput\n elif strCodingFmt == \"unicode\":\n return strInput.encode(\"utf8\")\n elif strCodingFmt == \"gbk\":\n return strInput.decode(\"gbk\").encode(\"utf8\")\n\n def tran2GBK(self, strInput):\n \"\"\"\n 转化为gbk格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == \"gbk\":\n return strInput\n elif strCodingFmt == \"unicode\":\n return strInput.encode(\"gbk\")\n elif strCodingFmt == \"utf8\":\n return strInput.decode(\"utf8\").encode(\"gbk\")\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n\n return number\n\n def req(\n self,\n login_msg,\n url,\n method,\n data=None,\n headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {\n 'Content-Type': 'application/json',\n 'Accept': 'application/json'\n }\n results = 'connection error'\n # 先登录\n r = session.post('https://xxxxxx.cn/login',\n data=json.dumps(login_msg), headers=headers)\n print (\"*******************************\")\n print (u\"登录状态信息\")\n print (r.status_code)\n print (r.content)\n print (\"*******************************\")\n try:\n if method == \"post\":\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(\n url, data=data, headers=headers, verify=False)\n elif method == \"get\":\n results = session.get(\n url, params=data, headers=headers, verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n\n return results\n except requests.ConnectionError as e:\n return e\n\n def con_db(self, sql):\n db = pymysql.connect(\n host=\"1.1.5.2\",\n user=\"xxx\",\n passwd=\"xxx\",\n db=\"xxx\",\n charset='utf8')\n\n cursor = db.cursor()\n cursor.execute(sql)\n data = cursor.fetchone()\n db.close()\n return data\n",
"<docstring token>\nimport json\nimport requests\nimport time\nimport hashlib\nimport random\nimport pymysql\n__version__ = '0.1'\n\n\nclass PublicLibrary(object):\n\n def __int__(self):\n pass\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n\n def tran2UTF8(self, strInput):\n \"\"\"\n 转化为utf8格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'utf8':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('utf8')\n elif strCodingFmt == 'gbk':\n return strInput.decode('gbk').encode('utf8')\n\n def tran2GBK(self, strInput):\n \"\"\"\n 转化为gbk格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'gbk':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('gbk')\n elif strCodingFmt == 'utf8':\n return strInput.decode('utf8').encode('gbk')\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n\n def req(self, login_msg, url, method, data=None, headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {'Content-Type': 'application/json', 'Accept':\n 'application/json'}\n results = 'connection error'\n r = session.post('https://xxxxxx.cn/login', data=json.dumps(\n login_msg), headers=headers)\n print('*******************************')\n print(u'登录状态信息')\n print(r.status_code)\n print(r.content)\n print('*******************************')\n try:\n if method == 'post':\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(url, data=data, headers=headers,\n verify=False)\n elif method == 'get':\n results = session.get(url, params=data, headers=headers,\n verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n return results\n except requests.ConnectionError as e:\n return e\n\n def con_db(self, sql):\n db = pymysql.connect(host='1.1.5.2', user='xxx', passwd='xxx', db=\n 'xxx', charset='utf8')\n cursor = db.cursor()\n cursor.execute(sql)\n data = cursor.fetchone()\n db.close()\n return data\n",
"<docstring token>\n<import token>\n__version__ = '0.1'\n\n\nclass PublicLibrary(object):\n\n def __int__(self):\n pass\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n\n def tran2UTF8(self, strInput):\n \"\"\"\n 转化为utf8格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'utf8':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('utf8')\n elif strCodingFmt == 'gbk':\n return strInput.decode('gbk').encode('utf8')\n\n def tran2GBK(self, strInput):\n \"\"\"\n 转化为gbk格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'gbk':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('gbk')\n elif strCodingFmt == 'utf8':\n return strInput.decode('utf8').encode('gbk')\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n\n def req(self, login_msg, url, method, data=None, headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {'Content-Type': 'application/json', 'Accept':\n 'application/json'}\n results = 'connection error'\n r = session.post('https://xxxxxx.cn/login', data=json.dumps(\n login_msg), headers=headers)\n print('*******************************')\n print(u'登录状态信息')\n print(r.status_code)\n print(r.content)\n print('*******************************')\n try:\n if method == 'post':\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(url, data=data, headers=headers,\n verify=False)\n elif method == 'get':\n results = session.get(url, params=data, headers=headers,\n verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n return results\n except requests.ConnectionError as e:\n return e\n\n def con_db(self, sql):\n db = pymysql.connect(host='1.1.5.2', user='xxx', passwd='xxx', db=\n 'xxx', charset='utf8')\n cursor = db.cursor()\n cursor.execute(sql)\n data = cursor.fetchone()\n db.close()\n return data\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n\n def __int__(self):\n pass\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n\n def tran2UTF8(self, strInput):\n \"\"\"\n 转化为utf8格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'utf8':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('utf8')\n elif strCodingFmt == 'gbk':\n return strInput.decode('gbk').encode('utf8')\n\n def tran2GBK(self, strInput):\n \"\"\"\n 转化为gbk格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'gbk':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('gbk')\n elif strCodingFmt == 'utf8':\n return strInput.decode('utf8').encode('gbk')\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n\n def req(self, login_msg, url, method, data=None, headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {'Content-Type': 'application/json', 'Accept':\n 'application/json'}\n results = 'connection error'\n r = session.post('https://xxxxxx.cn/login', data=json.dumps(\n login_msg), headers=headers)\n print('*******************************')\n print(u'登录状态信息')\n print(r.status_code)\n print(r.content)\n print('*******************************')\n try:\n if method == 'post':\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(url, data=data, headers=headers,\n verify=False)\n elif method == 'get':\n results = session.get(url, params=data, headers=headers,\n verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n return results\n except requests.ConnectionError as e:\n return e\n\n def con_db(self, sql):\n db = pymysql.connect(host='1.1.5.2', user='xxx', passwd='xxx', db=\n 'xxx', charset='utf8')\n cursor = db.cursor()\n cursor.execute(sql)\n data = cursor.fetchone()\n db.close()\n return data\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n\n def __int__(self):\n pass\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n <function token>\n\n def tran2GBK(self, strInput):\n \"\"\"\n 转化为gbk格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'gbk':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('gbk')\n elif strCodingFmt == 'utf8':\n return strInput.decode('utf8').encode('gbk')\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n\n def req(self, login_msg, url, method, data=None, headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {'Content-Type': 'application/json', 'Accept':\n 'application/json'}\n results = 'connection error'\n r = session.post('https://xxxxxx.cn/login', data=json.dumps(\n login_msg), headers=headers)\n print('*******************************')\n print(u'登录状态信息')\n print(r.status_code)\n print(r.content)\n print('*******************************')\n try:\n if method == 'post':\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(url, data=data, headers=headers,\n verify=False)\n elif method == 'get':\n results = session.get(url, params=data, headers=headers,\n verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n return results\n except requests.ConnectionError as e:\n return e\n\n def con_db(self, sql):\n db = pymysql.connect(host='1.1.5.2', user='xxx', passwd='xxx', db=\n 'xxx', charset='utf8')\n cursor = db.cursor()\n cursor.execute(sql)\n data = cursor.fetchone()\n db.close()\n return data\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n\n def __int__(self):\n pass\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n <function token>\n\n def tran2GBK(self, strInput):\n \"\"\"\n 转化为gbk格式\n \"\"\"\n strCodingFmt = self.getCoding(strInput)\n if strCodingFmt == 'gbk':\n return strInput\n elif strCodingFmt == 'unicode':\n return strInput.encode('gbk')\n elif strCodingFmt == 'utf8':\n return strInput.decode('utf8').encode('gbk')\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n\n def req(self, login_msg, url, method, data=None, headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {'Content-Type': 'application/json', 'Accept':\n 'application/json'}\n results = 'connection error'\n r = session.post('https://xxxxxx.cn/login', data=json.dumps(\n login_msg), headers=headers)\n print('*******************************')\n print(u'登录状态信息')\n print(r.status_code)\n print(r.content)\n print('*******************************')\n try:\n if method == 'post':\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(url, data=data, headers=headers,\n verify=False)\n elif method == 'get':\n results = session.get(url, params=data, headers=headers,\n verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n return results\n except requests.ConnectionError as e:\n return e\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n\n def __int__(self):\n pass\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n <function token>\n <function token>\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n\n def req(self, login_msg, url, method, data=None, headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {'Content-Type': 'application/json', 'Accept':\n 'application/json'}\n results = 'connection error'\n r = session.post('https://xxxxxx.cn/login', data=json.dumps(\n login_msg), headers=headers)\n print('*******************************')\n print(u'登录状态信息')\n print(r.status_code)\n print(r.content)\n print('*******************************')\n try:\n if method == 'post':\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(url, data=data, headers=headers,\n verify=False)\n elif method == 'get':\n results = session.get(url, params=data, headers=headers,\n verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n return results\n except requests.ConnectionError as e:\n return e\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n <function token>\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n <function token>\n <function token>\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n\n def req(self, login_msg, url, method, data=None, headers=None):\n u\"\"\"专用,有登录状态,例\n | run interface test tenant | login_msg,url,method,data,headers\n \"\"\"\n session = requests.Session()\n url = self.tran2UTF8(url)\n method = self.tran2UTF8(method)\n if login_msg:\n login_msg = self.eval_dict(login_msg)\n md5_pwd = self.md5(login_msg['passwd'])\n login_msg['passwd'] = md5_pwd\n if data:\n data = self.eval_dict(data)\n if headers:\n headers = self.eval_dict(headers)\n else:\n headers = {'Content-Type': 'application/json', 'Accept':\n 'application/json'}\n results = 'connection error'\n r = session.post('https://xxxxxx.cn/login', data=json.dumps(\n login_msg), headers=headers)\n print('*******************************')\n print(u'登录状态信息')\n print(r.status_code)\n print(r.content)\n print('*******************************')\n try:\n if method == 'post':\n if isinstance(data, dict):\n data = json.dumps(data)\n results = session.post(url, data=data, headers=headers,\n verify=False)\n elif method == 'get':\n results = session.get(url, params=data, headers=headers,\n verify=False)\n elif method == 'delete':\n results = session.delete(url, headers=headers, verify=False)\n return results\n except requests.ConnectionError as e:\n return e\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n <function token>\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n <function token>\n <function token>\n\n def md5(self, init_str):\n \"\"\"\n md5加密\n \"\"\"\n m = hashlib.md5()\n m.update(init_str)\n return m.hexdigest()\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n <function token>\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n <function token>\n <function token>\n <function token>\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n\n def random_num(self, num):\n \"\"\"\n 随机出给出数字位数的数字\n \"\"\"\n number = ''\n for i in random.sample(range(10), int(num)):\n number += ''.join(str(i))\n return number\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n <function token>\n\n def getCoding(self, strInput):\n u\"\"\"\n 获取编码格式\n \"\"\"\n if isinstance(strInput, unicode):\n return 'unicode'\n try:\n strInput.decode('utf8')\n return 'utf8'\n except:\n pass\n try:\n strInput.decode('gbk')\n return 'gbk'\n except:\n pass\n <function token>\n <function token>\n <function token>\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def eval_dict(self, strInput):\n u\"\"\"接收字符串直接转成需要类型,例\n | eval dict | str |\n \"\"\"\n strInput = eval(strInput)\n return strInput\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n\n\nclass PublicLibrary(object):\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<class token>\n"
] | false |
99,037 |
033d7f5884d89704f7bee9251efe2ca32bdaebc8
|
import geohash
import pandas as pd
mobike_csv_file_path = r"/media/jsl/ubuntu/data/mobike/MOBIKE_CUP_2017/train.csv"
train_data = pd.read_csv(mobike_csv_file_path)
start_loc = train_data['starttime']
# with open('1.txt', 'w') as f:
# for item in start_loc:
# ll = geohash.decode(item)
# lng = ll[1]
# lat = ll[0]
# f.write(lng+','+lat+'\r\n')
print(start_loc)
|
[
"import geohash\nimport pandas as pd\n\nmobike_csv_file_path = r\"/media/jsl/ubuntu/data/mobike/MOBIKE_CUP_2017/train.csv\"\ntrain_data = pd.read_csv(mobike_csv_file_path)\nstart_loc = train_data['starttime']\n# with open('1.txt', 'w') as f:\n# for item in start_loc:\n# ll = geohash.decode(item)\n# lng = ll[1]\n# lat = ll[0]\n# f.write(lng+','+lat+'\\r\\n')\n\nprint(start_loc)",
"import geohash\nimport pandas as pd\nmobike_csv_file_path = (\n '/media/jsl/ubuntu/data/mobike/MOBIKE_CUP_2017/train.csv')\ntrain_data = pd.read_csv(mobike_csv_file_path)\nstart_loc = train_data['starttime']\nprint(start_loc)\n",
"<import token>\nmobike_csv_file_path = (\n '/media/jsl/ubuntu/data/mobike/MOBIKE_CUP_2017/train.csv')\ntrain_data = pd.read_csv(mobike_csv_file_path)\nstart_loc = train_data['starttime']\nprint(start_loc)\n",
"<import token>\n<assignment token>\nprint(start_loc)\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
99,038 |
48ce65e046aa7409b3d1ffff373c54535de44ec8
|
class MedianFinder:
def __init__(self):
"""
initialize your data structure here.
"""
self.max_heap = [] # to contain left smaller half, or + 1
self.min_heap = [] # to contain right bigger half
def addNum(self, num: int) -> None:
if not self.max_heap or -self.max_heap[0] >= num:
heapq.heappush(self.max_heap, -num)
else:
heapq.heappush(self.min_heap, num)
if len(self.max_heap) > len(self.min_heap) + 1:
heapq.heappush(self.min_heap, -heappop(self.max_heap))
elif len(self.max_heap) < len(self.min_heap):
heapq.heappush(self.max_heap, -heappop(self.min_heap))
def findMedian(self) -> float:
if len(self.max_heap) == len(self.min_heap):
return (-self.max_heap[0] + self.min_heap[0]) / 2
return -self.max_heap[0]
class MedianFinder:
def __init__(self):
self.heaps = [], []
def addNum(self, num):
small, large = self.heaps
heappush(small, -heappushpop(large, num))
if len(large) < len(small):
heappush(large, -heappop(small))
def findMedian(self):
small, large = self.heaps
if len(large) > len(small):
return float(large[0])
return (large[0] - small[0]) / 2.0
# Your MedianFinder object will be instantiated and called as such:
# obj = MedianFinder()
# obj.addNum(num)
# param_2 = obj.findMedian()
|
[
"class MedianFinder:\n\n def __init__(self):\n \"\"\"\n initialize your data structure here.\n \"\"\"\n self.max_heap = [] # to contain left smaller half, or + 1\n self.min_heap = [] # to contain right bigger half\n\n def addNum(self, num: int) -> None:\n if not self.max_heap or -self.max_heap[0] >= num:\n heapq.heappush(self.max_heap, -num)\n else:\n heapq.heappush(self.min_heap, num)\n\n if len(self.max_heap) > len(self.min_heap) + 1:\n heapq.heappush(self.min_heap, -heappop(self.max_heap))\n elif len(self.max_heap) < len(self.min_heap):\n heapq.heappush(self.max_heap, -heappop(self.min_heap))\n\n def findMedian(self) -> float:\n if len(self.max_heap) == len(self.min_heap):\n return (-self.max_heap[0] + self.min_heap[0]) / 2\n return -self.max_heap[0]\n\n\nclass MedianFinder:\n def __init__(self):\n self.heaps = [], []\n\n def addNum(self, num):\n small, large = self.heaps\n heappush(small, -heappushpop(large, num))\n if len(large) < len(small):\n heappush(large, -heappop(small))\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n# Your MedianFinder object will be instantiated and called as such:\n# obj = MedianFinder()\n# obj.addNum(num)\n# param_2 = obj.findMedian()\n",
"class MedianFinder:\n\n def __init__(self):\n \"\"\"\n initialize your data structure here.\n \"\"\"\n self.max_heap = []\n self.min_heap = []\n\n def addNum(self, num: int) ->None:\n if not self.max_heap or -self.max_heap[0] >= num:\n heapq.heappush(self.max_heap, -num)\n else:\n heapq.heappush(self.min_heap, num)\n if len(self.max_heap) > len(self.min_heap) + 1:\n heapq.heappush(self.min_heap, -heappop(self.max_heap))\n elif len(self.max_heap) < len(self.min_heap):\n heapq.heappush(self.max_heap, -heappop(self.min_heap))\n\n def findMedian(self) ->float:\n if len(self.max_heap) == len(self.min_heap):\n return (-self.max_heap[0] + self.min_heap[0]) / 2\n return -self.max_heap[0]\n\n\nclass MedianFinder:\n\n def __init__(self):\n self.heaps = [], []\n\n def addNum(self, num):\n small, large = self.heaps\n heappush(small, -heappushpop(large, num))\n if len(large) < len(small):\n heappush(large, -heappop(small))\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n",
"class MedianFinder:\n\n def __init__(self):\n \"\"\"\n initialize your data structure here.\n \"\"\"\n self.max_heap = []\n self.min_heap = []\n\n def addNum(self, num: int) ->None:\n if not self.max_heap or -self.max_heap[0] >= num:\n heapq.heappush(self.max_heap, -num)\n else:\n heapq.heappush(self.min_heap, num)\n if len(self.max_heap) > len(self.min_heap) + 1:\n heapq.heappush(self.min_heap, -heappop(self.max_heap))\n elif len(self.max_heap) < len(self.min_heap):\n heapq.heappush(self.max_heap, -heappop(self.min_heap))\n <function token>\n\n\nclass MedianFinder:\n\n def __init__(self):\n self.heaps = [], []\n\n def addNum(self, num):\n small, large = self.heaps\n heappush(small, -heappushpop(large, num))\n if len(large) < len(small):\n heappush(large, -heappop(small))\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n",
"class MedianFinder:\n\n def __init__(self):\n \"\"\"\n initialize your data structure here.\n \"\"\"\n self.max_heap = []\n self.min_heap = []\n <function token>\n <function token>\n\n\nclass MedianFinder:\n\n def __init__(self):\n self.heaps = [], []\n\n def addNum(self, num):\n small, large = self.heaps\n heappush(small, -heappushpop(large, num))\n if len(large) < len(small):\n heappush(large, -heappop(small))\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n",
"class MedianFinder:\n <function token>\n <function token>\n <function token>\n\n\nclass MedianFinder:\n\n def __init__(self):\n self.heaps = [], []\n\n def addNum(self, num):\n small, large = self.heaps\n heappush(small, -heappushpop(large, num))\n if len(large) < len(small):\n heappush(large, -heappop(small))\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n",
"<class token>\n\n\nclass MedianFinder:\n\n def __init__(self):\n self.heaps = [], []\n\n def addNum(self, num):\n small, large = self.heaps\n heappush(small, -heappushpop(large, num))\n if len(large) < len(small):\n heappush(large, -heappop(small))\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n",
"<class token>\n\n\nclass MedianFinder:\n\n def __init__(self):\n self.heaps = [], []\n <function token>\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n",
"<class token>\n\n\nclass MedianFinder:\n <function token>\n <function token>\n\n def findMedian(self):\n small, large = self.heaps\n if len(large) > len(small):\n return float(large[0])\n return (large[0] - small[0]) / 2.0\n",
"<class token>\n\n\nclass MedianFinder:\n <function token>\n <function token>\n <function token>\n",
"<class token>\n<class token>\n"
] | false |
99,039 |
0c7edc05adddc02f881c2f8677b6e83bc56396f8
|
lstEven = []
lstOdd = []
for i in range(8):
num = int(input())
if num % 2 == 0:
lstEven.append(num)
else:
lstOdd.append(num)
if(len(lstEven) > len(lstOdd)):
print("Even")
else:
print("Odd")
print(sum(lstEven))
print(sum(lstOdd))
|
[
"lstEven = []\nlstOdd = []\nfor i in range(8):\n num = int(input())\n if num % 2 == 0:\n lstEven.append(num)\n else:\n lstOdd.append(num)\n\nif(len(lstEven) > len(lstOdd)):\n print(\"Even\")\nelse:\n print(\"Odd\")\n\nprint(sum(lstEven))\nprint(sum(lstOdd))",
"lstEven = []\nlstOdd = []\nfor i in range(8):\n num = int(input())\n if num % 2 == 0:\n lstEven.append(num)\n else:\n lstOdd.append(num)\nif len(lstEven) > len(lstOdd):\n print('Even')\nelse:\n print('Odd')\nprint(sum(lstEven))\nprint(sum(lstOdd))\n",
"<assignment token>\nfor i in range(8):\n num = int(input())\n if num % 2 == 0:\n lstEven.append(num)\n else:\n lstOdd.append(num)\nif len(lstEven) > len(lstOdd):\n print('Even')\nelse:\n print('Odd')\nprint(sum(lstEven))\nprint(sum(lstOdd))\n",
"<assignment token>\n<code token>\n"
] | false |
99,040 |
aae702ad85bbbdc57b74c0b62f2a539b81a39125
|
trackDict = {
"24 Hours of Le Mans Circuit": "1740968730",
"Autodromo Internazionale Enzo E Dino Ferrari Imola": "920145926",
"Autodromo Nazionale Monza GP": "4241994684",
"Autodromo Nazionale Monza GP Historic": "1184596327",
"Autodromo Nazionale Monza Historic Oval + GP Mix": "1327182267",
"Autodromo Nazionale Monza Oval Historic": "4131920659",
"Autodromo Nazionale Monza Short": "368740158",
"Autódromo Internacional do Algarve": "3878349996",
"Azure Circuit": "832629329",
"Azure Coast": "560711985"}
for key, value in trackDict.items():
if key == "Azure Coast":
track = key
trackid = value
print(track)
print(trackid)
|
[
"trackDict =\t{\r\n\t\"24 Hours of Le Mans Circuit\": \"1740968730\",\r\n\t\"Autodromo Internazionale Enzo E Dino Ferrari Imola\": \"920145926\",\r\n\t\"Autodromo Nazionale Monza GP\": \"4241994684\",\r\n\t\"Autodromo Nazionale Monza GP Historic\": \"1184596327\",\r\n\t\"Autodromo Nazionale Monza Historic Oval + GP Mix\": \"1327182267\",\r\n\t\"Autodromo Nazionale Monza Oval Historic\": \"4131920659\",\r\n\t\"Autodromo Nazionale Monza Short\": \"368740158\",\r\n\t\"Autódromo Internacional do Algarve\": \"3878349996\",\r\n\t\"Azure Circuit\": \"832629329\",\r\n\t\"Azure Coast\": \"560711985\"}\r\n\r\n\r\n\r\nfor key, value in trackDict.items():\r\n if key == \"Azure Coast\":\r\n track = key\r\n trackid = value\r\n\r\n\r\nprint(track)\r\nprint(trackid)",
"trackDict = {'24 Hours of Le Mans Circuit': '1740968730',\n 'Autodromo Internazionale Enzo E Dino Ferrari Imola': '920145926',\n 'Autodromo Nazionale Monza GP': '4241994684',\n 'Autodromo Nazionale Monza GP Historic': '1184596327',\n 'Autodromo Nazionale Monza Historic Oval + GP Mix': '1327182267',\n 'Autodromo Nazionale Monza Oval Historic': '4131920659',\n 'Autodromo Nazionale Monza Short': '368740158',\n 'Autódromo Internacional do Algarve': '3878349996', 'Azure Circuit':\n '832629329', 'Azure Coast': '560711985'}\nfor key, value in trackDict.items():\n if key == 'Azure Coast':\n track = key\n trackid = value\nprint(track)\nprint(trackid)\n",
"<assignment token>\nfor key, value in trackDict.items():\n if key == 'Azure Coast':\n track = key\n trackid = value\nprint(track)\nprint(trackid)\n",
"<assignment token>\n<code token>\n"
] | false |
99,041 |
d451b603281267f5cb8525fd718500d4c17f8705
|
import numpy as np
np.random.seed(0)
from procs import _corr
from timeseries import TimeSeries
def test_tsmaker():
#Setting seed to equate the two timeseries
_,t1 = _corr.tsmaker(0.5, 0.1, 0.01)
assert(len(t1.values()) == 100)
def test_randomts():
t1 = _corr.random_ts(0.5)
assert(len(t1.values()) == 100)
def test_stand():
t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])
val = _corr.stand(np.array(t1.values()), 55.0, 10)
assert(list(val) == [-1.5, -0.5, 0.5, 1.5])
def test_ccor():
#Testing the corr function independently
t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])
t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])
val = _corr.ccor(t1, t2)
assert(list(np.real(val)) == [12600, 12000, 11800, 12000])
assert(list(np.imag(val)) == [0, 0, 0, 0])
def test_maxcorr():
t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])
t2 = TimeSeries([1, 2, 3, 4], [50, 60, 70, 40])
standts1 = _corr.stand(t1, t1.mean(), t1.std())
standts2 = _corr.stand(t2, t2.mean(), t2.std())
idx, mcorr = _corr.max_corr_at_phase(standts1, standts2)
#idx should be equal to one since the second ts is shifted by 1
assert(idx == 1)
assert(np.real(mcorr) == 4)
def test_kernelcorr():
t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])
t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])
standts1 = _corr.stand(t1, t1.mean(), t1.std())
standts2 = _corr.stand(t2, t2.mean(), t2.std())
#Kernel_corr should return a correlation of 1.0 since we use similar timeseries
assert(_corr.kernel_corr(standts1, standts2, mult=1) == 1.0)
|
[
"import numpy as np\nnp.random.seed(0)\nfrom procs import _corr\nfrom timeseries import TimeSeries\n\n\ndef test_tsmaker():\n #Setting seed to equate the two timeseries\n _,t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert(len(t1.values()) == 100)\n\ndef test_randomts():\n t1 = _corr.random_ts(0.5)\n assert(len(t1.values()) == 100)\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert(list(val) == [-1.5, -0.5, 0.5, 1.5])\n\ndef test_ccor():\n #Testing the corr function independently\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.ccor(t1, t2)\n assert(list(np.real(val)) == [12600, 12000, 11800, 12000])\n assert(list(np.imag(val)) == [0, 0, 0, 0])\n\ndef test_maxcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [50, 60, 70, 40])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n idx, mcorr = _corr.max_corr_at_phase(standts1, standts2)\n #idx should be equal to one since the second ts is shifted by 1\n assert(idx == 1)\n assert(np.real(mcorr) == 4)\n\ndef test_kernelcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n #Kernel_corr should return a correlation of 1.0 since we use similar timeseries\n assert(_corr.kernel_corr(standts1, standts2, mult=1) == 1.0)\n \n\n",
"import numpy as np\nnp.random.seed(0)\nfrom procs import _corr\nfrom timeseries import TimeSeries\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\ndef test_randomts():\n t1 = _corr.random_ts(0.5)\n assert len(t1.values()) == 100\n\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert list(val) == [-1.5, -0.5, 0.5, 1.5]\n\n\ndef test_ccor():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.ccor(t1, t2)\n assert list(np.real(val)) == [12600, 12000, 11800, 12000]\n assert list(np.imag(val)) == [0, 0, 0, 0]\n\n\ndef test_maxcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [50, 60, 70, 40])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n idx, mcorr = _corr.max_corr_at_phase(standts1, standts2)\n assert idx == 1\n assert np.real(mcorr) == 4\n\n\ndef test_kernelcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n assert _corr.kernel_corr(standts1, standts2, mult=1) == 1.0\n",
"<import token>\nnp.random.seed(0)\n<import token>\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\ndef test_randomts():\n t1 = _corr.random_ts(0.5)\n assert len(t1.values()) == 100\n\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert list(val) == [-1.5, -0.5, 0.5, 1.5]\n\n\ndef test_ccor():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.ccor(t1, t2)\n assert list(np.real(val)) == [12600, 12000, 11800, 12000]\n assert list(np.imag(val)) == [0, 0, 0, 0]\n\n\ndef test_maxcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [50, 60, 70, 40])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n idx, mcorr = _corr.max_corr_at_phase(standts1, standts2)\n assert idx == 1\n assert np.real(mcorr) == 4\n\n\ndef test_kernelcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n assert _corr.kernel_corr(standts1, standts2, mult=1) == 1.0\n",
"<import token>\n<code token>\n<import token>\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\ndef test_randomts():\n t1 = _corr.random_ts(0.5)\n assert len(t1.values()) == 100\n\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert list(val) == [-1.5, -0.5, 0.5, 1.5]\n\n\ndef test_ccor():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.ccor(t1, t2)\n assert list(np.real(val)) == [12600, 12000, 11800, 12000]\n assert list(np.imag(val)) == [0, 0, 0, 0]\n\n\ndef test_maxcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [50, 60, 70, 40])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n idx, mcorr = _corr.max_corr_at_phase(standts1, standts2)\n assert idx == 1\n assert np.real(mcorr) == 4\n\n\ndef test_kernelcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n assert _corr.kernel_corr(standts1, standts2, mult=1) == 1.0\n",
"<import token>\n<code token>\n<import token>\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\ndef test_randomts():\n t1 = _corr.random_ts(0.5)\n assert len(t1.values()) == 100\n\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert list(val) == [-1.5, -0.5, 0.5, 1.5]\n\n\ndef test_ccor():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.ccor(t1, t2)\n assert list(np.real(val)) == [12600, 12000, 11800, 12000]\n assert list(np.imag(val)) == [0, 0, 0, 0]\n\n\n<function token>\n\n\ndef test_kernelcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n assert _corr.kernel_corr(standts1, standts2, mult=1) == 1.0\n",
"<import token>\n<code token>\n<import token>\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\n<function token>\n\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert list(val) == [-1.5, -0.5, 0.5, 1.5]\n\n\ndef test_ccor():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.ccor(t1, t2)\n assert list(np.real(val)) == [12600, 12000, 11800, 12000]\n assert list(np.imag(val)) == [0, 0, 0, 0]\n\n\n<function token>\n\n\ndef test_kernelcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n assert _corr.kernel_corr(standts1, standts2, mult=1) == 1.0\n",
"<import token>\n<code token>\n<import token>\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\n<function token>\n\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert list(val) == [-1.5, -0.5, 0.5, 1.5]\n\n\n<function token>\n<function token>\n\n\ndef test_kernelcorr():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n t2 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n standts1 = _corr.stand(t1, t1.mean(), t1.std())\n standts2 = _corr.stand(t2, t2.mean(), t2.std())\n assert _corr.kernel_corr(standts1, standts2, mult=1) == 1.0\n",
"<import token>\n<code token>\n<import token>\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\n<function token>\n\n\ndef test_stand():\n t1 = TimeSeries([1, 2, 3, 4], [40, 50, 60, 70])\n val = _corr.stand(np.array(t1.values()), 55.0, 10)\n assert list(val) == [-1.5, -0.5, 0.5, 1.5]\n\n\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<code token>\n<import token>\n\n\ndef test_tsmaker():\n _, t1 = _corr.tsmaker(0.5, 0.1, 0.01)\n assert len(t1.values()) == 100\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<code token>\n<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
99,042 |
5587c4eb4a8a8756a5f4ccf474781699c0c8d7f9
|
#! /usr/bin/env python2
'''
Register a mDNS/DNS-SD alias name for your computer using the Avahi daemon
This script will register an alternate CNAME alias besides your hostname,
which could be useful for ex. when serving several http virtual hosts to
your ffriends on the local network and you don't want to make them configure
their /etc/hosts.
Why a CNAME? You could also publish your current address with avahi-publish-address
but on a multihomed host (connected via wifi0 and eth0 perhaps) a single
address will not be valid on both networks. So this publishes a CNAME to your
hostname, which, by default, is already published by Avahi.
domain should almost always be .local
the cname is not restricted to ascii, it'll be encoded as IDNA
The alias will stay published until the script runs.
'''
import avahi, dbus
from encodings.idna import ToASCII
TTL = 60
# Got these from /usr/include/avahi-common/defs.h
CLASS_IN = 0x01
TYPE_CNAME = 0x05
def publish_cname(cname):
bus = dbus.SystemBus()
server = dbus.Interface(bus.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER),
avahi.DBUS_INTERFACE_SERVER)
group = dbus.Interface(bus.get_object(avahi.DBUS_NAME, server.EntryGroupNew()),
avahi.DBUS_INTERFACE_ENTRY_GROUP)
if not u'.' in cname:
cname = cname + '.local'
cname = encode_cname(cname)
rdata = encode_rdata(server.GetHostNameFqdn())
rdata = avahi.string_to_byte_array(rdata)
group.AddRecord(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),
cname, CLASS_IN, TYPE_CNAME, TTL, rdata)
group.Commit()
def encode_cname(name):
return '.'.join( ToASCII(p) for p in name.split('.') if p )
def encode_rdata(name):
def enc(part):
a = ToASCII(part)
return chr(len(a)), a
return ''.join( '%s%s' % enc(p) for p in name.split('.') if p ) + '\0'
if __name__ == '__main__':
import time, sys, locale
if len(sys.argv)<2:
script_name = sys.argv[0]
print "Usage: %s hostname.local [hostname2.local] [hostname3.local]" % script_name
sys.exit(1)
for each in sys.argv[1:]:
name = unicode(each, locale.getpreferredencoding())
publish_cname(name)
try:
while True: time.sleep(60)
except KeyboardInterrupt:
print "Exiting"
sys.exit(0)
|
[
"#! /usr/bin/env python2\n'''\nRegister a mDNS/DNS-SD alias name for your computer using the Avahi daemon\n\nThis script will register an alternate CNAME alias besides your hostname,\nwhich could be useful for ex. when serving several http virtual hosts to \nyour ffriends on the local network and you don't want to make them configure\ntheir /etc/hosts.\n\nWhy a CNAME? You could also publish your current address with avahi-publish-address\nbut on a multihomed host (connected via wifi0 and eth0 perhaps) a single\naddress will not be valid on both networks. So this publishes a CNAME to your\nhostname, which, by default, is already published by Avahi.\n\ndomain should almost always be .local\nthe cname is not restricted to ascii, it'll be encoded as IDNA\n\nThe alias will stay published until the script runs.\n'''\nimport avahi, dbus\nfrom encodings.idna import ToASCII\n\nTTL = 60\n# Got these from /usr/include/avahi-common/defs.h\nCLASS_IN = 0x01\nTYPE_CNAME = 0x05\n\n\ndef publish_cname(cname):\n bus = dbus.SystemBus()\n server = dbus.Interface(bus.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER),\n avahi.DBUS_INTERFACE_SERVER)\n group = dbus.Interface(bus.get_object(avahi.DBUS_NAME, server.EntryGroupNew()),\n avahi.DBUS_INTERFACE_ENTRY_GROUP)\n\n if not u'.' in cname:\n cname = cname + '.local'\n cname = encode_cname(cname)\n rdata = encode_rdata(server.GetHostNameFqdn())\n rdata = avahi.string_to_byte_array(rdata)\n\n group.AddRecord(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),\n cname, CLASS_IN, TYPE_CNAME, TTL, rdata)\n group.Commit()\n\n\ndef encode_cname(name):\n return '.'.join( ToASCII(p) for p in name.split('.') if p )\n\ndef encode_rdata(name):\n def enc(part):\n a = ToASCII(part)\n return chr(len(a)), a\n return ''.join( '%s%s' % enc(p) for p in name.split('.') if p ) + '\\0'\n\nif __name__ == '__main__':\n import time, sys, locale\n if len(sys.argv)<2:\n script_name = sys.argv[0]\n print \"Usage: %s hostname.local [hostname2.local] [hostname3.local]\" % script_name\n sys.exit(1)\n\n for each in sys.argv[1:]:\n name = unicode(each, locale.getpreferredencoding())\n publish_cname(name)\n try:\n while True: time.sleep(60)\n except KeyboardInterrupt:\n print \"Exiting\"\n sys.exit(0)"
] | true |
99,043 |
e7bf0633de83bb16cc69d74f5e996ecea658ac19
|
from django.db import models
from datetime import datetime
class Monday(models.Model):
favorite = models.CharField(max_length=100)
spread = models.CharField(max_length=100)
underdog = models.CharField(max_length=100)
list_total = models.CharField(max_length=100)
win = models.CharField(max_length=100)
is_published = models.BooleanField(default=True)
list_data = models.CharField(max_length=100)
def __str__(self):
return self.favorite
|
[
"from django.db import models\nfrom datetime import datetime\n\n\nclass Monday(models.Model):\n favorite = models.CharField(max_length=100)\n spread = models.CharField(max_length=100)\n underdog = models.CharField(max_length=100)\n list_total = models.CharField(max_length=100)\n win = models.CharField(max_length=100)\n is_published = models.BooleanField(default=True)\n list_data = models.CharField(max_length=100)\n\n def __str__(self):\n return self.favorite\n",
"<import token>\n\n\nclass Monday(models.Model):\n favorite = models.CharField(max_length=100)\n spread = models.CharField(max_length=100)\n underdog = models.CharField(max_length=100)\n list_total = models.CharField(max_length=100)\n win = models.CharField(max_length=100)\n is_published = models.BooleanField(default=True)\n list_data = models.CharField(max_length=100)\n\n def __str__(self):\n return self.favorite\n",
"<import token>\n\n\nclass Monday(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.favorite\n",
"<import token>\n\n\nclass Monday(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,044 |
046c35112be6c66e4cd179dc908c4c695514a077
|
import datetime
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils import timezone
from django.conf import settings
# Receive the pre_delete signal and delete the file associated with the model instance.
from django.db.models.signals import pre_delete
from django.dispatch.dispatcher import receiver
class Estado(models.Model):
nome = models.CharField(null=True, blank=True, max_length=75)
uf = models.CharField(null=True, blank=True, max_length=5)
def __str__(self):
return self.uf
class Cidade(models.Model):
nome = models.CharField(null=True, blank=True, max_length=120)
estado = models.ForeignKey(Estado, on_delete=models.CASCADE)
estado_uf = models.CharField(null=True, blank=True, max_length=5)
def __str__(self):
return self.nome
class Ongs(models.Model):
ADOPTION_VALUE = [
('Gratuito', 'Gratuito'),
('10,00', '10,00'),
('15,00', '15,00'),
('20,00', '20,00'),
('25,00', '25,00'),
('30,00', '30,00'),
('35,00', '35,00'),
('40,00', '40,00'),
('45,00', '45,00'),
('50,00', '50,00'),
('55,00', '55,00'),
('60,00', '60,00'),
('65,00', '65,00'),
('70,00', '70,00'),
('75,00', '75,00'),
('80,00', '80,00'),
('85,00', '85,00'),
('90,00', '90,00'),
('95,00', '95,00'),
('100,00', '100,00'),
]
OPEN_HOURS = [
('06:00', '06:00'),
('06:30', '06:30'),
('07:00', '07:00'),
('07:30', '07:30'),
('08:00', '08:00'),
('08:30', '08:30'),
('09:00', '09:00'),
('09:30', '09:30'),
('10:00', '10:00'),
('10:30', '10:30'),
('11:00', '11:00'),
('11:30', '11:30'),
('12:00', '12:00'),
('12:30', '12:30'),
]
CLOSE_HOURS = [
('16:00', '16:00'),
('16:30', '16:30'),
('17:00', '17:00'),
('17:30', '17:30'),
('18:00', '18:00'),
('18:30', '18:30'),
('19:00', '19:00'),
('19:30', '19:30'),
('20:00', '20:00'),
('20:30', '20:30'),
('21:00', '21:00'),
('21:30', '21:30'),
('22:00', '22:00'),
('22:30', '22:30'),
]
DDD = [
('11', '11'),
('12', '12'),
('13', '13'),
('14', '14'),
('15', '15'),
('16', '16'),
('17', '17'),
('18', '18'),
('19', '19'),
('21', '21'),
('22', '22'),
('24', '24'),
('27', '27'),
('28', '28'),
('31', '31'),
('32', '32'),
('33', '33'),
('34', '34'),
('35', '35'),
('37', '37'),
('38', '38'),
('41', '41'),
('42', '42'),
('43', '43'),
('44', '44'),
('45', '45'),
('46', '46'),
('47', '47'),
('48', '48'),
('49', '49'),
('51', '51'),
('53', '53'),
('54', '54'),
('55', '55'),
('61', '61'),
('62', '62'),
('63', '63'),
('64', '64'),
('65', '65'),
('66', '66'),
('67', '67'),
('68', '68'),
('69', '69'),
('71', '71'),
('73', '73'),
('74', '74'),
('75', '75'),
('77', '77'),
('79', '79'),
('81', '81'),
('82', '82'),
('83', '83'),
('84', '84'),
('85', '85'),
('86', '86'),
('87', '87'),
('88', '88'),
('89', '89'),
('91', '91'),
('92', '92'),
('93', '93'),
('94', '94'),
('95', '95'),
('96', '96'),
('97', '97'),
('98', '98'),
('99', '99'),
]
TRANSPORTATION = [
('Grtis', 'Grtis'),
('1,00', '1,00'),
('2,00', '2,00'),
('3,00', '3,00'),
('4,00', '4,00'),
('5,00', '5,00'),
('7,00', '7,00'),
('10,00', '10,00'),
('12,00', '12,00'),
('15,00', '15,00'),
('18,00', '18,00'),
('20,00', '20,00'),
('25,00', '25,00'),
('30,00', '30,00'),
('35,00', '35,00'),
('40,00', '40,00'),
('45,00', '45,00'),
('50,00', '50,00'),
('55,00', '65,00'),
('60,00', '60,00'),
]
name = models.CharField(null=True, blank=True, max_length=40)
rate = models.CharField(null=True, blank=True, default='Gratuito', max_length=8, choices=ADOPTION_VALUE)
hour_open = models.CharField(blank=True, null=True, default='', max_length=5, choices=OPEN_HOURS)
hour_close = models.CharField(blank=True, null=True, default='', max_length=5, choices=CLOSE_HOURS)
mission_statement = models.CharField(null=True, blank=True, default='', max_length=300)
description = models.CharField(null=True, blank=True, default='', max_length=500)
web_site = models.CharField(null=True, blank=True, max_length=150)
phone_number_ddd = models.CharField(null=True, blank=True, max_length=3, choices=DDD)
phone_number = models.CharField(null=True, blank=True, max_length=12)
email = models.CharField(null=True, blank=True, max_length=100)
facebook = models.CharField(null=True, blank=True, max_length=100)
instagram = models.CharField(null=True, blank=True, max_length=40)
logo_link = models.ImageField(null=True, blank=True)
picture_1 = models.ImageField(null=True, blank=True)
picture_2 = models.ImageField(null=True, blank=True)
created_at = models.DateField(auto_now_add=True)
updated_at = models.DateField(auto_now=True)
city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True, blank=True)
state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=True, blank=True)
is_foster_ok = models.BooleanField(default=0)
is_volunteer_ok = models.BooleanField(default=0)
has_transportation = models.BooleanField(default=0)
cnpj = models.CharField(null=True, blank=True, max_length=18)
founded_date = models.CharField(null=True, blank=True, max_length=30, default='')
is_approved = models.BooleanField(default=0)
transportation_price = models.CharField(null=True, blank=True, max_length=7, choices=TRANSPORTATION)
def __str__(self):
return self.name
class User(AbstractUser):
PERMISSION = [
('Editar tudo', 'Editar tudo'),
('Editar equipe e pets', 'Editar equipe e pets'),
('Editar pets', 'Editar pets'),
('Visualizar equipe e pets', 'Visualizar equipe e pets'),
('Visualizar pets', 'Visualizar pets'),
]
ROLE = [
('Advogado(a)', 'Advogado(a)'),
('Auxiliar de veterinrio', 'Auxiliar de veterinrio'),
('Bilogo(a)', 'Bilogo(a)'),
('Colaborador(a)', 'Colaborador(a)'),
('Departamento administrativo', 'Departamento administrativo'),
('Departamento de atendimento', 'Departamento de atendimento'),
('Departamento de eventos', 'Departamento de eventos'),
('Departamento educativo', 'Departamento educativo'),
('Departamento de marketing', 'Departamento de marketing'),
('Departamento financeiro', 'Departamento financeiro'),
('Diretor(a) administrativo', 'Diretor(a) administrativo'),
('Diretor(a) de eventos', 'Diretor(a) de eventos'),
('Diretor(a) financeiro', 'Diretor(a) financeiro'),
('Diretor(a) geral', 'Diretor(a) geral'),
('Diretor(a) marketing', 'Diretor(a) marketing'),
('Diretor(a) tcnico', 'Diretor(a) tcnico'),
('Funcionrio(a)', 'Funcionrio(a)'),
('Fundador(a)', 'Fundador(a)'),
('Presidente', 'Presidente'),
('Protetor(a) associado', 'Protetor(a) associado'),
('Secretrio(a)', 'Secretrio(a)'),
('Suplente de secretrio', 'Suplente de secretrio'),
('Suplente de presidente', 'Suplente de presidente'),
('Suplente de vice-presidente', 'Suplente de vice-presidente'),
('Tesoreiro(a)', 'Tesoreiro(a)'),
('Veterinrio(a)', 'Veterinrio(a)'),
('Vice-presidente', 'Vice-presidente'),
('Voluntrio(a)', 'Voluntrio(a)'),
]
permission_ong = models.CharField(null=True, blank=True, max_length=30, choices=PERMISSION)
role_ong = models.CharField(null=True, blank=True, max_length=30, choices=ROLE)
birth_date = models.DateField(null=True, blank=True)
has_confirmed_email = models.BooleanField(default=0)
country = models.CharField(null=True, blank=True, max_length=50)
state_code = models.CharField(null=True, blank=True, max_length=3)
city = models.CharField(null=True, blank=True, max_length=50)
neighborhood = models.CharField(null=True, blank=True, max_length=50)
rg = models.CharField(null=True, blank=True, max_length=12)
cpf = models.CharField(null=True, blank=True, max_length=15)
phone_number_ddd = models.CharField(null=True, max_length=3)
phone_number = models.CharField(null=True, blank=True, max_length=10)
address_street = models.CharField(null=True, blank=True, max_length=70)
address_number = models.CharField(null=True, blank=True, max_length=6)
address_complement = models.CharField(null=True, blank=True, max_length=10)
postal_code = models.CharField(null=True, blank=True, max_length=10)
facebook_id = models.CharField(null=True, blank=True, max_length=30)
created_at = models.DateField(auto_now_add=True)
updated_at = models.DateField(auto_now=True)
ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True, blank=True)
class Pet_breed(models.Model):
name = models.CharField(null=True, blank=True, max_length=100)
species = models.CharField(null=True, blank=True, max_length=30)
def __str__(self):
return self.name
class Pet(models.Model):
COLOR_OF_PETS = [
('Amarelo', 'Amarelo'),
('Branco', 'Branco'),
('Cinza', 'Cinza'),
('Creme', 'Creme'),
('Laranja', 'Laranja'),
('Marrom', 'Marrom'),
('Preto', 'Preto'),
]
COLOR_PATTERN_OF_PETS = [
('Arlequim', 'Arlequim'),
('Belton', 'Belton'),
('Bicolor', 'Bicolor'),
('Fulvo','Fulvo'),
('Lobeiro', 'Ruo'),
('Merle', 'Merle'),
('Pintaigado', 'Pintaigado'),
('Ruo', 'Ruo'),
('Sal e Pimenta', 'Sal e Pimenta'),
('Tigrado', 'Tigrado'),
('Unicolor','Unicolor')
]
GENDER_OF_PETS = [
('Fmea', 'Fmea'),
('Macho', 'Macho'),
]
ACTIVITY_LEVEL_PETS = [
('Hiperativo', 'Hiperativo'),
('Ativo', 'Ativo'),
('Moderado', 'Moderado'),
('Baixo', 'Baixo'),
]
SPECIAL_NEED = [
('3 patas funcionais', '3 patas funcionais'),
('2 patas funcionais', '2 patas funcionais'),
('1 pata funcional', '1 pata funcional'),
('0 patas funcionais', '0 patas funcionais'),
('Apenas alguns dentes', 'Apenas alguns dentes'),
('Cegueira parcial', 'Cegueira parcial'),
('Cegueira total', 'Cegueira total'),
('Necessidade de remdios para sempre', 'Necessidade de remdios para sempre'),
('Necessidade de terapias para sempre', 'Necessidade de terapias'),
('Necessidade de terapias e remdios para sempre', 'Necessidade de terapias e remdios para sempre'),
('Nenhum dente', 'Nenhum dente'),
('Doena Neural','Doena Neural'),
('Rabo amputado', 'Rabo amputado'),
('Surdez parcial', 'Surdez parcial'),
('Surdez total', 'Surdez total'),
]
CONFORTABLE = [
('No', 'No'),
('Sim', 'Sim'),
('No sei', 'No sei'),
]
STATUS_OF_PETS = [
('A caminho do novo lar', 'A caminho do novo lar'),
('Adoo pendente', 'Adoo pendente'),
('Adotado', 'Adotado'),
('Doente', 'Doente'),
('Esperando visita', 'Esperando visita'),
('Falecido', 'Falecido'),
('Retornando para abrigo','Retornando para abrigo'),
('Lar provisrio','Lar provisrio'),
('Lar provisrio pelo FDS','Lar provisrio pelo FDS'),
]
STATUS_OF_TEETH = [
('Perfeitos', 'Perfeitos'),
('Um pouco de trtaro', 'Um pouco de trtaro'),
('Trtaro mediano', 'Trtaro mediano'),
('Perdeu alguns dentes', 'Perdeu alguns dentes'),
('Dentes permitem apenas comida mole', 'Dentes permitem apenas comida mole'),
('Perdeu quase todos ou todos os dentes', 'Perdeu quase todos ou todos os dentes'),
]
COAT_OF_PETS = [
('Arrepiado ', 'Arrepiado'),
('Liso', 'Liso'),
('Ondulado', 'Ondulado'),
]
COAT_SIZE_OF_PETS = [
('Curto', 'Curto'),
('Mdio', 'Mdio'),
('Longo', 'Longo'),
]
SPECIES_OF_PETS = [
('Cachorro', 'Cachorro'),
('Gato', 'Gato'),
('Outros', 'Outros'),
]
SIZE_OF_PETS = [
('Mini', 'Mini'),
('Pequeno', 'Pequeno'),
('Mdio', 'Mdio'),
('Grande', 'Grande'),
('Gigante', 'Gigante'),
]
AGE_CATEGORY_OF_PETS = [
('Filhote', 'Filhote'),
('Adolescente', 'Adolescente'),
('Adulto', 'Adulto'),
('Maduro', 'Maduro'),
('Idoso', 'Idoso'),
]
AGE_OF_PETS = [
('1 ms', '1 ms'),
('2 meses', '2 meses'),
('3 meses', '3 meses'),
('4 meses', '4 meses'),
('5 meses', '5 meses'),
('6 meses', '6 meses'),
('7 meses', '7 meses'),
('8 meses', '8 meses'),
('9 meses', '9 meses'),
('10 meses', '10 meses'),
('11 meses', '11 meses'),
('1 ano', '1 ano'),
('2 anos', '2 anos'),
('3 anos', '3 anos'),
('4 anos', '4 anos'),
('5 anos', '5 anos'),
('6 anos', '6 anos'),
('7 anos', '7 anos'),
('8 anos', '8 anos'),
('9 anos', '9 anos'),
('10 anos', '10 anos'),
('11 anos', '11 anos'),
('12 anos', '12 anos'),
('13 anos', '13 anos'),
('14 anos', '14 anos'),
('15 anos', '15 anos'),
('16 anos', '16 anos'),
('17 anos', '17 anos'),
('18 anos', '18 anos'),
('19 anos', '19 anos'),
('20 anos', '20 anos'),
('21 anos', '21 anos'),
('22 anos', '22 anos'),
('23 anos', '23 anos'),
('24 anos', '24 anos'),
('25 anos', '25 anos'),
('26 anos', '26 anos'),
('27 anos', '27 anos'),
('28 anos', '28 anos'),
('29 anos', '29 anos'),
('30 anos', '30 anos'),
('No sei', 'No sei'),
]
DAY_OF_PETS = [
('No sei', 'No sei'),
('1', '1'),
('2', '2'),
('3', '3'),
('4', '4'),
('5', '5'),
('6', '6'),
('7', '7'),
('8', '8'),
('9', '9'),
('10', '10'),
('11', '11'),
('12', '12'),
('13', '13'),
('14', '14'),
('15', '15'),
('16', '16'),
('17', '17'),
('18', '18'),
('19', '19'),
('20', '20'),
('21', '21'),
('22', '22'),
('23', '23'),
('24', '24'),
('25', '25'),
('26', '26'),
('27', '27'),
('28', '28'),
('29', '29'),
('30', '30'),
('31', '31'),
]
MONTH_OF_PETS = [
('No sei', 'No sei'),
('Janeiro', 'Janeiro'),
('Fevereiro', 'Fevereiro'),
('Maro', 'Maro'),
('Abril', 'Abril'),
('Maio', 'Maio'),
('Junho', 'Junho'),
('Julho', 'Julho'),
('Agosto', 'Agosto'),
('Setembro', 'Setembro'),
('Outubro', 'Outubro'),
('Novembro', 'Novembro'),
('Dezembro', 'Dezembro'),
]
AGE_OF_PETS = [
('1 ms', '1 ms'),
('2 meses', '2 meses'),
('3 meses', '3 meses'),
('4 meses', '4 meses'),
('5 meses', '5 meses'),
('6 meses', '6 meses'),
('7 meses', '7 meses'),
('8 meses', '8 meses'),
('9 meses', '9 meses'),
('10 meses', '10 meses'),
('11 meses', '11 meses'),
('1 ano', '1 ano'),
('2 anos', '2 anos'),
('3 anos', '3 anos'),
('4 anos', '4 anos'),
('5 anos', '5 anos'),
('6 anos', '6 anos'),
('7 anos', '7 anos'),
('8 anos', '8 anos'),
('9 anos', '9 anos'),
('10 anos', '10 anos'),
('11 anos', '11 anos'),
('12 anos', '12 anos'),
('13 anos', '13 anos'),
('14 anos', '14 anos'),
('15 anos', '15 anos'),
('16 anos', '16 anos'),
('17 anos', '17 anos'),
('18 anos', '18 anos'),
('19 anos', '19 anos'),
('20 anos', '20 anos'),
('21 anos', '21 anos'),
('22 anos', '22 anos'),
('23 anos', '23 anos'),
('24 anos', '24 anos'),
('25 anos', '25 anos'),
('26 anos', '26 anos'),
('27 anos', '27 anos'),
('28 anos', '28 anos'),
('29 anos', '29 anos'),
('30 anos', '30 anos'),
('Menos de 1 ano', 'Menos de 1 ano'),
]
RETURN_OF_PETS = [
(0, 0),
(1, 1),
(2, 2),
(3, 3),
(4, 4),
(5, 5),
(6, 6),
(7, 7),
(8, 8),
(9, 9),
(10, 10),
]
TYPES_STREET = [
('Alameda', 'Alameda'),
('Avenida', 'Avenida'),
('Chcara', 'Chcara'),
('Colnia', 'Colnia'),
('Condomnio', 'Condomnio'),
('Conjunto', 'Conjunto'),
('Estao', 'Estao'),
('Estrada', 'Estrada'),
('Favela', 'Favela'),
('Fazenda', 'Fazenda'),
('Jardim', 'Jardim'),
('Ladeira', 'Ladeira'),
('Lago', 'Lago'),
('Largo', 'Largo'),
('Loteamento', 'Loteamento'),
('Passarela', 'Passarela'),
('Parque', 'Parque'),
('Praa', 'Praa'),
('Praia','Praia'),
('Rodovia', 'Rodovia'),
('Rua', 'Rua'),
('Setor', 'Setor'),
('Travessa', 'Travessa'),
('Viaduto', 'Viaduto'),
('Vila', 'Vila'),
]
SPECIAL_NEED = [
('3 patas funcionais', '3 patas funcionais'),
('2 patas funcionais', '2 patas funcionais'),
('1 pata funcional', '1 pata funcional'),
('0 patas funcionais', '0 patas funcionais'),
('No pode mastigar', 'No pode mastigar'),
('Cegueira parcial', 'Cegueira parcial'),
('Cegueira total', 'Cegueira total'),
('Necessidade de remdios para sempre', 'Necessidade de remdios para sempre'),
('Necessidade de terapias para sempre', 'Necessidade de terapias'),
('Necessidade de terapias e remdios para sempre', 'Necessidade de terapias e remdios para sempre'),
('Doena mental','Doena mental'),
('Epilepsia','Epilesia'),
('Rabo amputado', 'Rabo amputado'),
('Surdez parcial', 'Surdez parcial'),
('Surdez total', 'Surdez total'),
('No sente cheiro','No sente cheiro')
]
def get_years():
now = int(timezone.now().year) + 1
past = timezone.now().year - 30
a = []
for i in reversed(range(past,now)):
a.append((i,i))
a = tuple(a)
return a
name = models.CharField("Nome", null=True, blank=True, max_length=30)
pet_description = models.CharField(null=True, blank=True, max_length = 700)
age = models.CharField(null=True, blank=True, max_length=40, choices=AGE_OF_PETS, default='')
age_category = models.CharField(null=True, blank=True, max_length=30, choices=AGE_CATEGORY_OF_PETS, default='')
species = models.CharField(null=True, blank=True, max_length=25,choices=SPECIES_OF_PETS, default='')
primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE, null=True, blank=True, related_name='primary_breed')
secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE, null=True, blank=True, related_name='secondary_breed')
color = models.CharField(null=True, blank=True, max_length=30,choices=COLOR_OF_PETS, default='')
coat = models.CharField(null=True, blank=True, max_length=20,choices=COAT_OF_PETS, default='')
gender = models.CharField(null=True, blank=True, max_length=10, choices=GENDER_OF_PETS, default='')
birth_day = models.CharField(default=0, null=True, blank=True, max_length=30,choices=DAY_OF_PETS,)
birth_month = models.CharField(default=0, null=True, blank=True, max_length=30,choices=MONTH_OF_PETS,)
birth_year = models.IntegerField(default=0, null=True, blank=True, choices=get_years())
is_microchiped = models.BooleanField(default=0)
activity_level = models.CharField(null=True, blank=True, max_length=40, choices=ACTIVITY_LEVEL_PETS, default='')
is_basic_trainned = models.BooleanField(default=0)
confortable_with_kids = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')
confortable_with_elder = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')
confortable_with_cats = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')
confortable_with_dogs = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')
confortable_with_men = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')
confortable_with_women = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')
arrival_date = models.CharField(null=True, blank=True, max_length=30, default='')
where_was_found_name = models.CharField(null=True, blank=True, max_length=100, default='')
is_neutered = models.BooleanField(default=0)
was_rabbies_vaccinated_this_year = models.BooleanField(default=0)
was_v_vaccinated_this_year = models.BooleanField(default=0)
was_others_vaccinated_this_year = models.BooleanField(default=0)
profile_picture = models.ImageField(null=True, blank=True)
picture_1 = models.ImageField(null=True, blank=True)
picture_2 = models.ImageField(null=True, blank=True)
picture_3 = models.ImageField(null=True, blank=True)
video = models.CharField(null=True, blank=True, max_length=150)
qty_views = models.IntegerField(default=0)
qty_favorites = models.IntegerField(default=0)
qty_msg = models.IntegerField(default=0)
qty_shares = models.IntegerField(default=0)
ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)
status = models.CharField(null=True, blank=True, max_length=50, choices=STATUS_OF_PETS, default='')
coat_size = models.CharField(null=True, blank=True, max_length=50, choices=COAT_SIZE_OF_PETS, default='')
walk_pull = models.BooleanField(default=0)
walk_pull_hard = models.BooleanField(default=0)
walk_dogs = models.BooleanField(default=0)
walk_people = models.BooleanField(default=0)
walk_fear = models.BooleanField(default=0)
color_pattern = models.CharField(null=True, blank=True, max_length=30,choices=COLOR_PATTERN_OF_PETS, default='')
size = models.CharField(null=True, blank=True, max_length=50,choices=SIZE_OF_PETS, default='')
qty_preview_adoptions = models.IntegerField(default=0, choices=RETURN_OF_PETS)
qty_adoptions_app = models.IntegerField(default=0)
created_at = models.DateField(auto_now_add=True)
updated_at = models.DateField(auto_now=True)
teeth_status = models.CharField(null=True, blank=True, max_length=50, choices=STATUS_OF_TEETH, default='')
combo_adoption_id = models.IntegerField(default=0, null=True, blank=True,)
is_available_adoption = models.BooleanField(default=1)
where_was_found = models.CharField(null=True, blank=True, max_length=50, choices=TYPES_STREET, default='')
where_was_found_city = models.CharField(null=True, blank=True, max_length=100, default='')
where_was_found_state = models.CharField(null=True, blank=True, max_length=100, default='')
first_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')
second_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')
third_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')
is_mixed_breed = models.BooleanField(default=1)
is_walking_daily = models.BooleanField(default=0)
is_acupuncture = models.BooleanField(default=0)
is_physiotherapy = models.BooleanField(default=0)
is_vermifuged = models.BooleanField(default=0)
is_lice_free = models.BooleanField(default=0)
is_dog_meet_necessary = models.BooleanField(default=0)
walk_alone_dislike = models.BooleanField(default=0)
walk_alone = models.BooleanField(default=0)
walk_leash = models.BooleanField(default=0)
id_at_ong = models.IntegerField(default=0, null=True, blank=True)
def __str__(self):
return self.name
@receiver(models.signals.pre_save, sender=Pet)
def delete_file_on_change_extension(sender, instance, **kwargs):
if instance.pk:
try:
old_pic = Pet.objects.get(pk=instance.pk).profile_picture
except Pet.DoesNotExist:
return
else:
new_pic = instance.profile_picture
if old_pic and old_pic.url != new_pic.url:
old_pic.delete(save=False)
class Favorites(models.Model):
user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)
class Pet_disease_areas(models.Model):
name = models.CharField(null=True, blank=True, max_length=300)
def __str__(self):
return self.name
class Pet_disease(models.Model):
AREA_OF_PETS = [
('Cardiologia', 'Cardiologia'),
('Dermatologia', 'Dermatologia'),
('Endocrinologia', 'Endocrinologia'),
('Gastroenterologia e Hepatologia','Gastroenterologia e Hepatologia'),
('Hematologia e Imunologia', 'Hematologia e Imunologia'),
('Infecciosas', 'Infecciosas'),
('Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'),
('Musculoesquelticas', 'Musculoesquelticas'),
('Nefrologia e Urologia', 'Nefrologia e Urologia'),
('Neonatologia', 'Neonatologia'),
('Neurologia', 'Neurologia'),
('Oftalmologia', 'Oftalmologia'),
('Oncologia', 'Oncologia'),
('Respiratrias', 'Respiratrias'),
('Teriogenologia','Teriogenologia'),
('Vacinao e Nutrologia', 'Vacinao e Nutrologia'),
('Outras', 'Outras'),
]
name = models.CharField(null=True, blank=True, max_length=150)
area = models.CharField(null=True, blank=True, max_length=100,choices=AREA_OF_PETS, default='')
area_id = models.ForeignKey(Pet_disease_areas,on_delete=models.CASCADE,null=True, blank=True)
def __str__(self):
return self.name
class Pet_health(models.Model):
SPECIAL_NEED = [
('3 patas funcionais', '3 patas funcionais'),
('2 patas funcionais', '2 patas funcionais'),
('1 pata funcional', '1 pata funcional'),
('0 patas funcionais', '0 patas funcionais'),
('No pode mastigar', 'No pode mastigar'),
('Cegueira parcial', 'Cegueira parcial'),
('Cegueira total', 'Cegueira total'),
('Necessidade de remdios para sempre', 'Necessidade de remdios para sempre'),
('Necessidade de terapias para sempre', 'Necessidade de terapias'),
('Necessidade de terapias e remdios para sempre', 'Necessidade de terapias e remdios para sempre'),
('Doena mental','Doena mental'),
('Epilepsia','Epilesia'),
('Rabo amputado', 'Rabo amputado'),
('Surdez parcial', 'Surdez parcial'),
('Surdez total', 'Surdez total'),
('No sente cheiro','No sente cheiro')
]
STATUS = [
('Curado', 'Curado'),
('Em tratamento', 'Em tratamento'),
('Sem verba', 'Sem verba'),
]
SPECIAL_TREATMENT = [
('Fisioterapia', 'Fisioterapia'),
('Acunpuntura', 'Acunpuntura'),
('Caminhada diria', 'Caminhada diria'),
]
TYPES = [
('Fatal', 'Fatal'),
('Para o resto da vida', 'Para o resto da vida'),
('Temporria', 'Temporria'),
]
pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)
diagnose_date = models.DateField(null=True, blank=True)
#disease = models.ForeignKey(Pet_disease,on_delete=models.CASCADE)
disease_status = models.CharField(null=True, blank=True, max_length=100, choices=STATUS, default='')
disease_type = models.CharField(null=True, blank=True, max_length=100, choices=TYPES, default='')
internal_notes = models.CharField(null=True, blank=True, max_length=300)
which_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')
which_special_treatment = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_TREATMENT, default='')
#disease_area = models.ForeignKey(Pet_disease_areas,on_delete=models.CASCADE, null=True, blank=True)
disease_name = models.CharField(null=True, blank=True, max_length=200)
created_at = models.DateField(auto_now_add=True)
updated_at = models.DateField(auto_now=True)
def __str__(self):
return self.disease
|
[
"import datetime\nfrom django.db import models\nfrom django.contrib.auth.models import AbstractUser\nfrom django.utils import timezone\nfrom django.conf import settings\n# Receive the pre_delete signal and delete the file associated with the model instance.\nfrom django.db.models.signals import pre_delete\nfrom django.dispatch.dispatcher import receiver\n\n\nclass Estado(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=75)\n uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.uf\n\nclass Cidade(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=120)\n estado = models.ForeignKey(Estado, on_delete=models.CASCADE)\n estado_uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.nome\n\nclass Ongs(models.Model):\n\n ADOPTION_VALUE = [ \n ('Gratuito', 'Gratuito'),\n ('10,00', '10,00'), \n ('15,00', '15,00'), \n ('20,00', '20,00'),\n ('25,00', '25,00'), \n ('30,00', '30,00'), \n ('35,00', '35,00'), \n ('40,00', '40,00'),\n ('45,00', '45,00'), \n ('50,00', '50,00'), \n ('55,00', '55,00'), \n ('60,00', '60,00'),\n ('65,00', '65,00'), \n ('70,00', '70,00'),\n ('75,00', '75,00'), \n ('80,00', '80,00'), \n ('85,00', '85,00'), \n ('90,00', '90,00'),\n ('95,00', '95,00'), \n ('100,00', '100,00'), \n ]\n\n\n OPEN_HOURS = [ \n ('06:00', '06:00'),\n ('06:30', '06:30'), \n ('07:00', '07:00'), \n ('07:30', '07:30'),\n ('08:00', '08:00'), \n ('08:30', '08:30'), \n ('09:00', '09:00'), \n ('09:30', '09:30'),\n ('10:00', '10:00'), \n ('10:30', '10:30'), \n ('11:00', '11:00'), \n ('11:30', '11:30'),\n ('12:00', '12:00'), \n ('12:30', '12:30'),\n ]\n\n CLOSE_HOURS = [ \n ('16:00', '16:00'),\n ('16:30', '16:30'), \n ('17:00', '17:00'), \n ('17:30', '17:30'),\n ('18:00', '18:00'), \n ('18:30', '18:30'), \n ('19:00', '19:00'), \n ('19:30', '19:30'),\n ('20:00', '20:00'), \n ('20:30', '20:30'), \n ('21:00', '21:00'), \n ('21:30', '21:30'),\n ('22:00', '22:00'), \n ('22:30', '22:30'),\n ]\n\n DDD = [ \n ('11', '11'),\n ('12', '12'),\n ('13', '13'),\n ('14', '14'),\n ('15', '15'),\n ('16', '16'),\n ('17', '17'),\n ('18', '18'),\n ('19', '19'),\n ('21', '21'),\n ('22', '22'),\n ('24', '24'),\n ('27', '27'),\n ('28', '28'),\n ('31', '31'),\n ('32', '32'),\n ('33', '33'),\n ('34', '34'),\n ('35', '35'),\n ('37', '37'),\n ('38', '38'),\n ('41', '41'),\n ('42', '42'),\n ('43', '43'),\n ('44', '44'),\n ('45', '45'),\n ('46', '46'),\n ('47', '47'),\n ('48', '48'),\n ('49', '49'),\n ('51', '51'),\n ('53', '53'),\n ('54', '54'),\n ('55', '55'),\n ('61', '61'),\n ('62', '62'),\n ('63', '63'),\n ('64', '64'),\n ('65', '65'),\n ('66', '66'),\n ('67', '67'),\n ('68', '68'),\n ('69', '69'),\n ('71', '71'),\n ('73', '73'),\n ('74', '74'),\n ('75', '75'),\n ('77', '77'),\n ('79', '79'),\n ('81', '81'),\n ('82', '82'),\n ('83', '83'),\n ('84', '84'),\n ('85', '85'),\n ('86', '86'),\n ('87', '87'),\n ('88', '88'),\n ('89', '89'),\n ('91', '91'),\n ('92', '92'),\n ('93', '93'),\n ('94', '94'),\n ('95', '95'),\n ('96', '96'),\n ('97', '97'),\n ('98', '98'),\n ('99', '99'),\n ]\n\n TRANSPORTATION = [ \n ('Grtis', 'Grtis'),\n ('1,00', '1,00'), \n ('2,00', '2,00'), \n ('3,00', '3,00'),\n ('4,00', '4,00'), \n ('5,00', '5,00'), \n ('7,00', '7,00'), \n ('10,00', '10,00'),\n ('12,00', '12,00'), \n ('15,00', '15,00'), \n ('18,00', '18,00'), \n ('20,00', '20,00'),\n ('25,00', '25,00'), \n ('30,00', '30,00'),\n ('35,00', '35,00'), \n ('40,00', '40,00'), \n ('45,00', '45,00'), \n ('50,00', '50,00'),\n ('55,00', '65,00'), \n ('60,00', '60,00'), \n ]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito', max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='', max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='', max_length=5, choices=CLOSE_HOURS) \n mission_statement = models.CharField(null=True, blank=True, default='', max_length=300)\n description = models.CharField(null=True, blank=True, default='', max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3, choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True, blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30, default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True, max_length=7, choices=TRANSPORTATION)\n \n def __str__(self):\n return self.name\n\nclass User(AbstractUser):\n\n PERMISSION = [\n ('Editar tudo', 'Editar tudo'), \n ('Editar equipe e pets', 'Editar equipe e pets'),\n ('Editar pets', 'Editar pets'),\n ('Visualizar equipe e pets', 'Visualizar equipe e pets'),\n ('Visualizar pets', 'Visualizar pets'),\n ]\n\n ROLE = [\n ('Advogado(a)', 'Advogado(a)'),\n ('Auxiliar de veterinrio', 'Auxiliar de veterinrio'),\n ('Bilogo(a)', 'Bilogo(a)'), \n ('Colaborador(a)', 'Colaborador(a)'), \n ('Departamento administrativo', 'Departamento administrativo'),\n ('Departamento de atendimento', 'Departamento de atendimento'), \n ('Departamento de eventos', 'Departamento de eventos'),\n ('Departamento educativo', 'Departamento educativo'), \n ('Departamento de marketing', 'Departamento de marketing'), \n ('Departamento financeiro', 'Departamento financeiro'),\n ('Diretor(a) administrativo', 'Diretor(a) administrativo'),\n ('Diretor(a) de eventos', 'Diretor(a) de eventos'),\n ('Diretor(a) financeiro', 'Diretor(a) financeiro'),\n ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), \n ('Diretor(a) tcnico', 'Diretor(a) tcnico'), \n ('Funcionrio(a)', 'Funcionrio(a)'),\n ('Fundador(a)', 'Fundador(a)'), \n ('Presidente', 'Presidente'),\n ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'),\n ('Suplente de secretrio', 'Suplente de secretrio'),\n ('Suplente de presidente', 'Suplente de presidente'),\n ('Suplente de vice-presidente', 'Suplente de vice-presidente'), \n ('Tesoreiro(a)', 'Tesoreiro(a)'), \n ('Veterinrio(a)', 'Veterinrio(a)'),\n ('Vice-presidente', 'Vice-presidente'),\n ('Voluntrio(a)', 'Voluntrio(a)'),\n ]\n\n permission_ong = models.CharField(null=True, blank=True, max_length=30, choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30, choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30) \n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True) \n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True, blank=True)\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name \n\nclass Pet(models.Model):\n\n COLOR_OF_PETS = [\n ('Amarelo', 'Amarelo'), \n ('Branco', 'Branco'), \n ('Cinza', 'Cinza'), \n ('Creme', 'Creme'),\n ('Laranja', 'Laranja'),\n ('Marrom', 'Marrom'), \n ('Preto', 'Preto'), \n ]\n\n COLOR_PATTERN_OF_PETS = [\n ('Arlequim', 'Arlequim'),\n ('Belton', 'Belton'), \n ('Bicolor', 'Bicolor'), \n ('Fulvo','Fulvo'),\n ('Lobeiro', 'Ruo'), \n ('Merle', 'Merle'),\n ('Pintaigado', 'Pintaigado'), \n ('Ruo', 'Ruo'), \n ('Sal e Pimenta', 'Sal e Pimenta'), \n ('Tigrado', 'Tigrado'),\n ('Unicolor','Unicolor')\n ]\n\n GENDER_OF_PETS = [\n ('Fmea', 'Fmea'), \n ('Macho', 'Macho'), \n ]\n\n ACTIVITY_LEVEL_PETS = [\n ('Hiperativo', 'Hiperativo'), \n ('Ativo', 'Ativo'), \n ('Moderado', 'Moderado'),\n ('Baixo', 'Baixo'), \n ]\n\n SPECIAL_NEED = [\n ('3 patas funcionais', '3 patas funcionais'), \n ('2 patas funcionais', '2 patas funcionais'),\n ('1 pata funcional', '1 pata funcional'), \n ('0 patas funcionais', '0 patas funcionais'), \n ('Apenas alguns dentes', 'Apenas alguns dentes'),\n ('Cegueira parcial', 'Cegueira parcial'), \n ('Cegueira total', 'Cegueira total'),\n ('Necessidade de remdios para sempre', 'Necessidade de remdios para sempre'),\n ('Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre', 'Necessidade de terapias e remdios para sempre'),\n ('Nenhum dente', 'Nenhum dente'), \n ('Doena Neural','Doena Neural'),\n ('Rabo amputado', 'Rabo amputado'), \n ('Surdez parcial', 'Surdez parcial'), \n ('Surdez total', 'Surdez total'), \n ]\n\n CONFORTABLE = [ \n ('No', 'No'), \n ('Sim', 'Sim'),\n ('No sei', 'No sei'),\n ]\n\n STATUS_OF_PETS = [\n ('A caminho do novo lar', 'A caminho do novo lar'), \n ('Adoo pendente', 'Adoo pendente'), \n ('Adotado', 'Adotado'), \n ('Doente', 'Doente'),\n ('Esperando visita', 'Esperando visita'), \n ('Falecido', 'Falecido'),\n ('Retornando para abrigo','Retornando para abrigo'),\n ('Lar provisrio','Lar provisrio'),\n ('Lar provisrio pelo FDS','Lar provisrio pelo FDS'),\n\n ]\n\n STATUS_OF_TEETH = [\n ('Perfeitos', 'Perfeitos'), \n ('Um pouco de trtaro', 'Um pouco de trtaro'), \n ('Trtaro mediano', 'Trtaro mediano'),\n ('Perdeu alguns dentes', 'Perdeu alguns dentes'),\n ('Dentes permitem apenas comida mole', 'Dentes permitem apenas comida mole'), \n ('Perdeu quase todos ou todos os dentes', 'Perdeu quase todos ou todos os dentes'), \n ]\n\n COAT_OF_PETS = [\n ('Arrepiado ', 'Arrepiado'), \n ('Liso', 'Liso'),\n ('Ondulado', 'Ondulado'), \n ]\n\n COAT_SIZE_OF_PETS = [\n ('Curto', 'Curto'), \n ('Mdio', 'Mdio'),\n ('Longo', 'Longo'), \n ]\n\n SPECIES_OF_PETS = [\n ('Cachorro', 'Cachorro'), \n ('Gato', 'Gato'), \n ('Outros', 'Outros'), \n ]\n\n SIZE_OF_PETS = [\n ('Mini', 'Mini'), \n ('Pequeno', 'Pequeno'), \n ('Mdio', 'Mdio'), \n ('Grande', 'Grande'),\n ('Gigante', 'Gigante'), \n ]\n\n AGE_CATEGORY_OF_PETS = [\n ('Filhote', 'Filhote'), \n ('Adolescente', 'Adolescente'), \n ('Adulto', 'Adulto'), \n ('Maduro', 'Maduro'),\n ('Idoso', 'Idoso'), \n ]\n\n AGE_OF_PETS = [\n ('1 ms', '1 ms'), \n ('2 meses', '2 meses'), \n ('3 meses', '3 meses'), \n ('4 meses', '4 meses'),\n ('5 meses', '5 meses'), \n ('6 meses', '6 meses'), \n ('7 meses', '7 meses'), \n ('8 meses', '8 meses'),\n ('9 meses', '9 meses'), \n ('10 meses', '10 meses'), \n ('11 meses', '11 meses'),\n ('1 ano', '1 ano'), \n ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), \n ('4 anos', '4 anos'),\n ('5 anos', '5 anos'), \n ('6 anos', '6 anos'),\n ('7 anos', '7 anos'),\n ('8 anos', '8 anos'), \n ('9 anos', '9 anos'),\n ('10 anos', '10 anos'), \n ('11 anos', '11 anos'),\n ('12 anos', '12 anos'),\n ('13 anos', '13 anos'), \n ('14 anos', '14 anos'),\n ('15 anos', '15 anos'), \n ('16 anos', '16 anos'),\n ('17 anos', '17 anos'),\n ('18 anos', '18 anos'), \n ('19 anos', '19 anos'),\n ('20 anos', '20 anos'), \n ('21 anos', '21 anos'),\n ('22 anos', '22 anos'), \n ('23 anos', '23 anos'),\n ('24 anos', '24 anos'), \n ('25 anos', '25 anos'),\n ('26 anos', '26 anos'), \n ('27 anos', '27 anos'),\n ('28 anos', '28 anos'), \n ('29 anos', '29 anos'),\n ('30 anos', '30 anos'), \n ('No sei', 'No sei'),\n ]\n\n\n DAY_OF_PETS = [ \n ('No sei', 'No sei'),\n ('1', '1'), \n ('2', '2'),\n ('3', '3'), \n ('4', '4'),\n ('5', '5'), \n ('6', '6'),\n ('7', '7'),\n ('8', '8'), \n ('9', '9'),\n ('10', '10'), \n ('11', '11'),\n ('12', '12'),\n ('13', '13'), \n ('14', '14'),\n ('15', '15'), \n ('16', '16'),\n ('17', '17'),\n ('18', '18'), \n ('19', '19'),\n ('20', '20'), \n ('21', '21'),\n ('22', '22'), \n ('23', '23'),\n ('24', '24'), \n ('25', '25'),\n ('26', '26'), \n ('27', '27'),\n ('28', '28'), \n ('29', '29'),\n ('30', '30'),\n ('31', '31'), \n ]\n\n MONTH_OF_PETS = [\n ('No sei', 'No sei'), \n ('Janeiro', 'Janeiro'), \n ('Fevereiro', 'Fevereiro'), \n ('Maro', 'Maro'),\n ('Abril', 'Abril'), \n ('Maio', 'Maio'), \n ('Junho', 'Junho'), \n ('Julho', 'Julho'),\n ('Agosto', 'Agosto'), \n ('Setembro', 'Setembro'), \n ('Outubro', 'Outubro'),\n ('Novembro', 'Novembro'), \n ('Dezembro', 'Dezembro'),\n ]\n\n AGE_OF_PETS = [\n ('1 ms', '1 ms'), \n ('2 meses', '2 meses'), \n ('3 meses', '3 meses'), \n ('4 meses', '4 meses'),\n ('5 meses', '5 meses'), \n ('6 meses', '6 meses'), \n ('7 meses', '7 meses'), \n ('8 meses', '8 meses'),\n ('9 meses', '9 meses'), \n ('10 meses', '10 meses'), \n ('11 meses', '11 meses'),\n ('1 ano', '1 ano'), \n ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), \n ('4 anos', '4 anos'),\n ('5 anos', '5 anos'), \n ('6 anos', '6 anos'),\n ('7 anos', '7 anos'),\n ('8 anos', '8 anos'), \n ('9 anos', '9 anos'),\n ('10 anos', '10 anos'), \n ('11 anos', '11 anos'),\n ('12 anos', '12 anos'),\n ('13 anos', '13 anos'), \n ('14 anos', '14 anos'),\n ('15 anos', '15 anos'), \n ('16 anos', '16 anos'),\n ('17 anos', '17 anos'),\n ('18 anos', '18 anos'), \n ('19 anos', '19 anos'),\n ('20 anos', '20 anos'), \n ('21 anos', '21 anos'),\n ('22 anos', '22 anos'), \n ('23 anos', '23 anos'),\n ('24 anos', '24 anos'), \n ('25 anos', '25 anos'),\n ('26 anos', '26 anos'), \n ('27 anos', '27 anos'),\n ('28 anos', '28 anos'), \n ('29 anos', '29 anos'),\n ('30 anos', '30 anos'), \n ('Menos de 1 ano', 'Menos de 1 ano'),\n ]\n\n RETURN_OF_PETS = [ \n (0, 0),\n (1, 1), \n (2, 2),\n (3, 3), \n (4, 4),\n (5, 5), \n (6, 6),\n (7, 7),\n (8, 8), \n (9, 9),\n (10, 10), \n ]\n\n TYPES_STREET = [\n ('Alameda', 'Alameda'),\n ('Avenida', 'Avenida'),\n ('Chcara', 'Chcara'),\n ('Colnia', 'Colnia'),\n ('Condomnio', 'Condomnio'),\n ('Conjunto', 'Conjunto'),\n ('Estao', 'Estao'),\n ('Estrada', 'Estrada'),\n ('Favela', 'Favela'),\n ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'),\n ('Ladeira', 'Ladeira'),\n ('Lago', 'Lago'),\n ('Largo', 'Largo'),\n ('Loteamento', 'Loteamento'),\n ('Passarela', 'Passarela'),\n ('Parque', 'Parque'),\n ('Praa', 'Praa'),\n ('Praia','Praia'),\n ('Rodovia', 'Rodovia'),\n ('Rua', 'Rua'),\n ('Setor', 'Setor'),\n ('Travessa', 'Travessa'),\n ('Viaduto', 'Viaduto'),\n ('Vila', 'Vila'),\n ]\n\n SPECIAL_NEED = [\n ('3 patas funcionais', '3 patas funcionais'), \n ('2 patas funcionais', '2 patas funcionais'),\n ('1 pata funcional', '1 pata funcional'), \n ('0 patas funcionais', '0 patas funcionais'), \n ('No pode mastigar', 'No pode mastigar'),\n ('Cegueira parcial', 'Cegueira parcial'), \n ('Cegueira total', 'Cegueira total'),\n ('Necessidade de remdios para sempre', 'Necessidade de remdios para sempre'),\n ('Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre', 'Necessidade de terapias e remdios para sempre'),\n ('Doena mental','Doena mental'),\n ('Epilepsia','Epilesia'),\n ('Rabo amputado', 'Rabo amputado'), \n ('Surdez parcial', 'Surdez parcial'), \n ('Surdez total', 'Surdez total'), \n ('No sente cheiro','No sente cheiro') \n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n \n a = []\n for i in reversed(range(past,now)):\n a.append((i,i))\n a = tuple(a) \n\n return a\n\n name = models.CharField(\"Nome\", null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length = 700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30, choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE, null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE, null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30,choices=COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20,choices=COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices=GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True, max_length=30,choices=DAY_OF_PETS,)\n birth_month = models.CharField(default=0, null=True, blank=True, max_length=30,choices=MONTH_OF_PETS,)\n birth_year = models.IntegerField(default=0, null=True, blank=True, choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40, choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True, max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30, default='')\n where_was_found_name = models.CharField(null=True, blank=True, max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices=STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50, choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50,choices=SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50, choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True,)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50, choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True, max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True, max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n@receiver(models.signals.pre_save, sender=Pet)\ndef delete_file_on_change_extension(sender, instance, **kwargs):\n if instance.pk:\n try:\n old_pic = Pet.objects.get(pk=instance.pk).profile_picture\n except Pet.DoesNotExist:\n return\n else:\n new_pic = instance.profile_picture\n if old_pic and old_pic.url != new_pic.url:\n old_pic.delete(save=False)\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\nclass Pet_disease(models.Model):\n\n AREA_OF_PETS = [\n ('Cardiologia', 'Cardiologia'), \n ('Dermatologia', 'Dermatologia'),\n ('Endocrinologia', 'Endocrinologia'), \n ('Gastroenterologia e Hepatologia','Gastroenterologia e Hepatologia'),\n ('Hematologia e Imunologia', 'Hematologia e Imunologia'), \n ('Infecciosas', 'Infecciosas'), \n ('Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), \n ('Musculoesquelticas', 'Musculoesquelticas'),\n ('Nefrologia e Urologia', 'Nefrologia e Urologia'), \n ('Neonatologia', 'Neonatologia'), \n ('Neurologia', 'Neurologia'), \n ('Oftalmologia', 'Oftalmologia'), \n ('Oncologia', 'Oncologia'), \n ('Respiratrias', 'Respiratrias'),\n ('Teriogenologia','Teriogenologia'),\n ('Vacinao e Nutrologia', 'Vacinao e Nutrologia'), \n ('Outras', 'Outras'), \n ]\n\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100,choices=AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas,on_delete=models.CASCADE,null=True, blank=True)\n\n def __str__(self):\n return self.name\n\nclass Pet_health(models.Model):\n\n SPECIAL_NEED = [\n ('3 patas funcionais', '3 patas funcionais'), \n ('2 patas funcionais', '2 patas funcionais'),\n ('1 pata funcional', '1 pata funcional'), \n ('0 patas funcionais', '0 patas funcionais'), \n ('No pode mastigar', 'No pode mastigar'),\n ('Cegueira parcial', 'Cegueira parcial'), \n ('Cegueira total', 'Cegueira total'),\n ('Necessidade de remdios para sempre', 'Necessidade de remdios para sempre'),\n ('Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre', 'Necessidade de terapias e remdios para sempre'),\n ('Doena mental','Doena mental'),\n ('Epilepsia','Epilesia'),\n ('Rabo amputado', 'Rabo amputado'), \n ('Surdez parcial', 'Surdez parcial'), \n ('Surdez total', 'Surdez total'), \n ('No sente cheiro','No sente cheiro') \n ]\n\n STATUS = [\n ('Curado', 'Curado'), \n ('Em tratamento', 'Em tratamento'),\n ('Sem verba', 'Sem verba'), \n ]\n\n SPECIAL_TREATMENT = [\n ('Fisioterapia', 'Fisioterapia'), \n ('Acunpuntura', 'Acunpuntura'),\n ('Caminhada diria', 'Caminhada diria'), \n ]\n\n TYPES = [\n ('Fatal', 'Fatal'), \n ('Para o resto da vida', 'Para o resto da vida'), \n ('Temporria', 'Temporria'),\n ]\n\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n #disease = models.ForeignKey(Pet_disease,on_delete=models.CASCADE)\n disease_status = models.CharField(null=True, blank=True, max_length=100, choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100, choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300) \n which_special_need = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True, max_length=100, choices=SPECIAL_TREATMENT, default='')\n #disease_area = models.ForeignKey(Pet_disease_areas,on_delete=models.CASCADE, null=True, blank=True)\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"import datetime\nfrom django.db import models\nfrom django.contrib.auth.models import AbstractUser\nfrom django.utils import timezone\nfrom django.conf import settings\nfrom django.db.models.signals import pre_delete\nfrom django.dispatch.dispatcher import receiver\n\n\nclass Estado(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=75)\n uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.uf\n\n\nclass Cidade(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=120)\n estado = models.ForeignKey(Estado, on_delete=models.CASCADE)\n estado_uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.nome\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n@receiver(models.signals.pre_save, sender=Pet)\ndef delete_file_on_change_extension(sender, instance, **kwargs):\n if instance.pk:\n try:\n old_pic = Pet.objects.get(pk=instance.pk).profile_picture\n except Pet.DoesNotExist:\n return\n else:\n new_pic = instance.profile_picture\n if old_pic and old_pic.url != new_pic.url:\n old_pic.delete(save=False)\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n\n\nclass Estado(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=75)\n uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.uf\n\n\nclass Cidade(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=120)\n estado = models.ForeignKey(Estado, on_delete=models.CASCADE)\n estado_uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.nome\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n@receiver(models.signals.pre_save, sender=Pet)\ndef delete_file_on_change_extension(sender, instance, **kwargs):\n if instance.pk:\n try:\n old_pic = Pet.objects.get(pk=instance.pk).profile_picture\n except Pet.DoesNotExist:\n return\n else:\n new_pic = instance.profile_picture\n if old_pic and old_pic.url != new_pic.url:\n old_pic.delete(save=False)\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n\n\nclass Estado(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=75)\n uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.uf\n\n\nclass Cidade(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=120)\n estado = models.ForeignKey(Estado, on_delete=models.CASCADE)\n estado_uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.nome\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n\n\nclass Estado(models.Model):\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.uf\n\n\nclass Cidade(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=120)\n estado = models.ForeignKey(Estado, on_delete=models.CASCADE)\n estado_uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.nome\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n\n\nclass Estado(models.Model):\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass Cidade(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=120)\n estado = models.ForeignKey(Estado, on_delete=models.CASCADE)\n estado_uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.nome\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n\n\nclass Cidade(models.Model):\n nome = models.CharField(null=True, blank=True, max_length=120)\n estado = models.ForeignKey(Estado, on_delete=models.CASCADE)\n estado_uf = models.CharField(null=True, blank=True, max_length=5)\n\n def __str__(self):\n return self.nome\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n\n\nclass Cidade(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.nome\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n\n\nclass Cidade(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n\n\nclass Ongs(models.Model):\n ADOPTION_VALUE = [('Gratuito', 'Gratuito'), ('10,00', '10,00'), (\n '15,00', '15,00'), ('20,00', '20,00'), ('25,00', '25,00'), ('30,00',\n '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00', '45,00'\n ), ('50,00', '50,00'), ('55,00', '55,00'), ('60,00', '60,00'), (\n '65,00', '65,00'), ('70,00', '70,00'), ('75,00', '75,00'), ('80,00',\n '80,00'), ('85,00', '85,00'), ('90,00', '90,00'), ('95,00', '95,00'\n ), ('100,00', '100,00')]\n OPEN_HOURS = [('06:00', '06:00'), ('06:30', '06:30'), ('07:00', '07:00'\n ), ('07:30', '07:30'), ('08:00', '08:00'), ('08:30', '08:30'), (\n '09:00', '09:00'), ('09:30', '09:30'), ('10:00', '10:00'), ('10:30',\n '10:30'), ('11:00', '11:00'), ('11:30', '11:30'), ('12:00', '12:00'\n ), ('12:30', '12:30')]\n CLOSE_HOURS = [('16:00', '16:00'), ('16:30', '16:30'), ('17:00',\n '17:00'), ('17:30', '17:30'), ('18:00', '18:00'), ('18:30', '18:30'\n ), ('19:00', '19:00'), ('19:30', '19:30'), ('20:00', '20:00'), (\n '20:30', '20:30'), ('21:00', '21:00'), ('21:30', '21:30'), ('22:00',\n '22:00'), ('22:30', '22:30')]\n DDD = [('11', '11'), ('12', '12'), ('13', '13'), ('14', '14'), ('15',\n '15'), ('16', '16'), ('17', '17'), ('18', '18'), ('19', '19'), (\n '21', '21'), ('22', '22'), ('24', '24'), ('27', '27'), ('28', '28'),\n ('31', '31'), ('32', '32'), ('33', '33'), ('34', '34'), ('35', '35'\n ), ('37', '37'), ('38', '38'), ('41', '41'), ('42', '42'), ('43',\n '43'), ('44', '44'), ('45', '45'), ('46', '46'), ('47', '47'), (\n '48', '48'), ('49', '49'), ('51', '51'), ('53', '53'), ('54', '54'),\n ('55', '55'), ('61', '61'), ('62', '62'), ('63', '63'), ('64', '64'\n ), ('65', '65'), ('66', '66'), ('67', '67'), ('68', '68'), ('69',\n '69'), ('71', '71'), ('73', '73'), ('74', '74'), ('75', '75'), (\n '77', '77'), ('79', '79'), ('81', '81'), ('82', '82'), ('83', '83'),\n ('84', '84'), ('85', '85'), ('86', '86'), ('87', '87'), ('88', '88'\n ), ('89', '89'), ('91', '91'), ('92', '92'), ('93', '93'), ('94',\n '94'), ('95', '95'), ('96', '96'), ('97', '97'), ('98', '98'), (\n '99', '99')]\n TRANSPORTATION = [('Grtis', 'Grtis'), ('1,00', '1,00'), ('2,00', '2,00'\n ), ('3,00', '3,00'), ('4,00', '4,00'), ('5,00', '5,00'), ('7,00',\n '7,00'), ('10,00', '10,00'), ('12,00', '12,00'), ('15,00', '15,00'),\n ('18,00', '18,00'), ('20,00', '20,00'), ('25,00', '25,00'), (\n '30,00', '30,00'), ('35,00', '35,00'), ('40,00', '40,00'), ('45,00',\n '45,00'), ('50,00', '50,00'), ('55,00', '65,00'), ('60,00', '60,00')]\n name = models.CharField(null=True, blank=True, max_length=40)\n rate = models.CharField(null=True, blank=True, default='Gratuito',\n max_length=8, choices=ADOPTION_VALUE)\n hour_open = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=OPEN_HOURS)\n hour_close = models.CharField(blank=True, null=True, default='',\n max_length=5, choices=CLOSE_HOURS)\n mission_statement = models.CharField(null=True, blank=True, default='',\n max_length=300)\n description = models.CharField(null=True, blank=True, default='',\n max_length=500)\n web_site = models.CharField(null=True, blank=True, max_length=150)\n phone_number_ddd = models.CharField(null=True, blank=True, max_length=3,\n choices=DDD)\n phone_number = models.CharField(null=True, blank=True, max_length=12)\n email = models.CharField(null=True, blank=True, max_length=100)\n facebook = models.CharField(null=True, blank=True, max_length=100)\n instagram = models.CharField(null=True, blank=True, max_length=40)\n logo_link = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n city_id = models.ForeignKey(Cidade, on_delete=models.CASCADE, null=True,\n blank=True)\n state_id = models.ForeignKey(Estado, on_delete=models.CASCADE, null=\n True, blank=True)\n is_foster_ok = models.BooleanField(default=0)\n is_volunteer_ok = models.BooleanField(default=0)\n has_transportation = models.BooleanField(default=0)\n cnpj = models.CharField(null=True, blank=True, max_length=18)\n founded_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n is_approved = models.BooleanField(default=0)\n transportation_price = models.CharField(null=True, blank=True,\n max_length=7, choices=TRANSPORTATION)\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n\n\nclass Ongs(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.name\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n\n\nclass Ongs(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass User(AbstractUser):\n PERMISSION = [('Editar tudo', 'Editar tudo'), ('Editar equipe e pets',\n 'Editar equipe e pets'), ('Editar pets', 'Editar pets'), (\n 'Visualizar equipe e pets', 'Visualizar equipe e pets'), (\n 'Visualizar pets', 'Visualizar pets')]\n ROLE = [('Advogado(a)', 'Advogado(a)'), ('Auxiliar de veterinrio',\n 'Auxiliar de veterinrio'), ('Bilogo(a)', 'Bilogo(a)'), (\n 'Colaborador(a)', 'Colaborador(a)'), ('Departamento administrativo',\n 'Departamento administrativo'), ('Departamento de atendimento',\n 'Departamento de atendimento'), ('Departamento de eventos',\n 'Departamento de eventos'), ('Departamento educativo',\n 'Departamento educativo'), ('Departamento de marketing',\n 'Departamento de marketing'), ('Departamento financeiro',\n 'Departamento financeiro'), ('Diretor(a) administrativo',\n 'Diretor(a) administrativo'), ('Diretor(a) de eventos',\n 'Diretor(a) de eventos'), ('Diretor(a) financeiro',\n 'Diretor(a) financeiro'), ('Diretor(a) geral', 'Diretor(a) geral'),\n ('Diretor(a) marketing', 'Diretor(a) marketing'), (\n 'Diretor(a) tcnico', 'Diretor(a) tcnico'), ('Funcionrio(a)',\n 'Funcionrio(a)'), ('Fundador(a)', 'Fundador(a)'), ('Presidente',\n 'Presidente'), ('Protetor(a) associado', 'Protetor(a) associado'),\n ('Secretrio(a)', 'Secretrio(a)'), ('Suplente de secretrio',\n 'Suplente de secretrio'), ('Suplente de presidente',\n 'Suplente de presidente'), ('Suplente de vice-presidente',\n 'Suplente de vice-presidente'), ('Tesoreiro(a)', 'Tesoreiro(a)'), (\n 'Veterinrio(a)', 'Veterinrio(a)'), ('Vice-presidente',\n 'Vice-presidente'), ('Voluntrio(a)', 'Voluntrio(a)')]\n permission_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=PERMISSION)\n role_ong = models.CharField(null=True, blank=True, max_length=30,\n choices=ROLE)\n birth_date = models.DateField(null=True, blank=True)\n has_confirmed_email = models.BooleanField(default=0)\n country = models.CharField(null=True, blank=True, max_length=50)\n state_code = models.CharField(null=True, blank=True, max_length=3)\n city = models.CharField(null=True, blank=True, max_length=50)\n neighborhood = models.CharField(null=True, blank=True, max_length=50)\n rg = models.CharField(null=True, blank=True, max_length=12)\n cpf = models.CharField(null=True, blank=True, max_length=15)\n phone_number_ddd = models.CharField(null=True, max_length=3)\n phone_number = models.CharField(null=True, blank=True, max_length=10)\n address_street = models.CharField(null=True, blank=True, max_length=70)\n address_number = models.CharField(null=True, blank=True, max_length=6)\n address_complement = models.CharField(null=True, blank=True, max_length=10)\n postal_code = models.CharField(null=True, blank=True, max_length=10)\n facebook_id = models.CharField(null=True, blank=True, max_length=30)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.SET_NULL, null=True,\n blank=True)\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n\n\nclass User(AbstractUser):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet_breed(models.Model):\n name = models.CharField(null=True, blank=True, max_length=100)\n species = models.CharField(null=True, blank=True, max_length=30)\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet_breed(models.Model):\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.name\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet_breed(models.Model):\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet(models.Model):\n COLOR_OF_PETS = [('Amarelo', 'Amarelo'), ('Branco', 'Branco'), ('Cinza',\n 'Cinza'), ('Creme', 'Creme'), ('Laranja', 'Laranja'), ('Marrom',\n 'Marrom'), ('Preto', 'Preto')]\n COLOR_PATTERN_OF_PETS = [('Arlequim', 'Arlequim'), ('Belton', 'Belton'),\n ('Bicolor', 'Bicolor'), ('Fulvo', 'Fulvo'), ('Lobeiro', 'Ruo'), (\n 'Merle', 'Merle'), ('Pintaigado', 'Pintaigado'), ('Ruo', 'Ruo'), (\n 'Sal e Pimenta', 'Sal e Pimenta'), ('Tigrado', 'Tigrado'), (\n 'Unicolor', 'Unicolor')]\n GENDER_OF_PETS = [('Fmea', 'Fmea'), ('Macho', 'Macho')]\n ACTIVITY_LEVEL_PETS = [('Hiperativo', 'Hiperativo'), ('Ativo', 'Ativo'),\n ('Moderado', 'Moderado'), ('Baixo', 'Baixo')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('Apenas alguns dentes', 'Apenas alguns dentes'), (\n 'Cegueira parcial', 'Cegueira parcial'), ('Cegueira total',\n 'Cegueira total'), ('Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Nenhum dente',\n 'Nenhum dente'), ('Doena Neural', 'Doena Neural'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total')]\n CONFORTABLE = [('No', 'No'), ('Sim', 'Sim'), ('No sei', 'No sei')]\n STATUS_OF_PETS = [('A caminho do novo lar', 'A caminho do novo lar'), (\n 'Adoo pendente', 'Adoo pendente'), ('Adotado', 'Adotado'), (\n 'Doente', 'Doente'), ('Esperando visita', 'Esperando visita'), (\n 'Falecido', 'Falecido'), ('Retornando para abrigo',\n 'Retornando para abrigo'), ('Lar provisrio', 'Lar provisrio'), (\n 'Lar provisrio pelo FDS', 'Lar provisrio pelo FDS')]\n STATUS_OF_TEETH = [('Perfeitos', 'Perfeitos'), ('Um pouco de trtaro',\n 'Um pouco de trtaro'), ('Trtaro mediano', 'Trtaro mediano'), (\n 'Perdeu alguns dentes', 'Perdeu alguns dentes'), (\n 'Dentes permitem apenas comida mole',\n 'Dentes permitem apenas comida mole'), (\n 'Perdeu quase todos ou todos os dentes',\n 'Perdeu quase todos ou todos os dentes')]\n COAT_OF_PETS = [('Arrepiado ', 'Arrepiado'), ('Liso', 'Liso'), (\n 'Ondulado', 'Ondulado')]\n COAT_SIZE_OF_PETS = [('Curto', 'Curto'), ('Mdio', 'Mdio'), ('Longo',\n 'Longo')]\n SPECIES_OF_PETS = [('Cachorro', 'Cachorro'), ('Gato', 'Gato'), (\n 'Outros', 'Outros')]\n SIZE_OF_PETS = [('Mini', 'Mini'), ('Pequeno', 'Pequeno'), ('Mdio',\n 'Mdio'), ('Grande', 'Grande'), ('Gigante', 'Gigante')]\n AGE_CATEGORY_OF_PETS = [('Filhote', 'Filhote'), ('Adolescente',\n 'Adolescente'), ('Adulto', 'Adulto'), ('Maduro', 'Maduro'), (\n 'Idoso', 'Idoso')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('No sei', 'No sei')]\n DAY_OF_PETS = [('No sei', 'No sei'), ('1', '1'), ('2', '2'), ('3', '3'),\n ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9',\n '9'), ('10', '10'), ('11', '11'), ('12', '12'), ('13', '13'), ('14',\n '14'), ('15', '15'), ('16', '16'), ('17', '17'), ('18', '18'), (\n '19', '19'), ('20', '20'), ('21', '21'), ('22', '22'), ('23', '23'),\n ('24', '24'), ('25', '25'), ('26', '26'), ('27', '27'), ('28', '28'\n ), ('29', '29'), ('30', '30'), ('31', '31')]\n MONTH_OF_PETS = [('No sei', 'No sei'), ('Janeiro', 'Janeiro'), (\n 'Fevereiro', 'Fevereiro'), ('Maro', 'Maro'), ('Abril', 'Abril'), (\n 'Maio', 'Maio'), ('Junho', 'Junho'), ('Julho', 'Julho'), ('Agosto',\n 'Agosto'), ('Setembro', 'Setembro'), ('Outubro', 'Outubro'), (\n 'Novembro', 'Novembro'), ('Dezembro', 'Dezembro')]\n AGE_OF_PETS = [('1 ms', '1 ms'), ('2 meses', '2 meses'), ('3 meses',\n '3 meses'), ('4 meses', '4 meses'), ('5 meses', '5 meses'), (\n '6 meses', '6 meses'), ('7 meses', '7 meses'), ('8 meses',\n '8 meses'), ('9 meses', '9 meses'), ('10 meses', '10 meses'), (\n '11 meses', '11 meses'), ('1 ano', '1 ano'), ('2 anos', '2 anos'),\n ('3 anos', '3 anos'), ('4 anos', '4 anos'), ('5 anos', '5 anos'), (\n '6 anos', '6 anos'), ('7 anos', '7 anos'), ('8 anos', '8 anos'), (\n '9 anos', '9 anos'), ('10 anos', '10 anos'), ('11 anos', '11 anos'),\n ('12 anos', '12 anos'), ('13 anos', '13 anos'), ('14 anos',\n '14 anos'), ('15 anos', '15 anos'), ('16 anos', '16 anos'), (\n '17 anos', '17 anos'), ('18 anos', '18 anos'), ('19 anos',\n '19 anos'), ('20 anos', '20 anos'), ('21 anos', '21 anos'), (\n '22 anos', '22 anos'), ('23 anos', '23 anos'), ('24 anos',\n '24 anos'), ('25 anos', '25 anos'), ('26 anos', '26 anos'), (\n '27 anos', '27 anos'), ('28 anos', '28 anos'), ('29 anos',\n '29 anos'), ('30 anos', '30 anos'), ('Menos de 1 ano',\n 'Menos de 1 ano')]\n RETURN_OF_PETS = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6\n ), (7, 7), (8, 8), (9, 9), (10, 10)]\n TYPES_STREET = [('Alameda', 'Alameda'), ('Avenida', 'Avenida'), (\n 'Chcara', 'Chcara'), ('Colnia', 'Colnia'), ('Condomnio',\n 'Condomnio'), ('Conjunto', 'Conjunto'), ('Estao', 'Estao'), (\n 'Estrada', 'Estrada'), ('Favela', 'Favela'), ('Fazenda', 'Fazenda'),\n ('Jardim', 'Jardim'), ('Ladeira', 'Ladeira'), ('Lago', 'Lago'), (\n 'Largo', 'Largo'), ('Loteamento', 'Loteamento'), ('Passarela',\n 'Passarela'), ('Parque', 'Parque'), ('Praa', 'Praa'), ('Praia',\n 'Praia'), ('Rodovia', 'Rodovia'), ('Rua', 'Rua'), ('Setor', 'Setor'\n ), ('Travessa', 'Travessa'), ('Viaduto', 'Viaduto'), ('Vila', 'Vila')]\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n name = models.CharField('Nome', null=True, blank=True, max_length=30)\n pet_description = models.CharField(null=True, blank=True, max_length=700)\n age = models.CharField(null=True, blank=True, max_length=40, choices=\n AGE_OF_PETS, default='')\n age_category = models.CharField(null=True, blank=True, max_length=30,\n choices=AGE_CATEGORY_OF_PETS, default='')\n species = models.CharField(null=True, blank=True, max_length=25,\n choices=SPECIES_OF_PETS, default='')\n primary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='primary_breed')\n secondary_breed = models.ForeignKey(Pet_breed, on_delete=models.CASCADE,\n null=True, blank=True, related_name='secondary_breed')\n color = models.CharField(null=True, blank=True, max_length=30, choices=\n COLOR_OF_PETS, default='')\n coat = models.CharField(null=True, blank=True, max_length=20, choices=\n COAT_OF_PETS, default='')\n gender = models.CharField(null=True, blank=True, max_length=10, choices\n =GENDER_OF_PETS, default='')\n birth_day = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=DAY_OF_PETS)\n birth_month = models.CharField(default=0, null=True, blank=True,\n max_length=30, choices=MONTH_OF_PETS)\n birth_year = models.IntegerField(default=0, null=True, blank=True,\n choices=get_years())\n is_microchiped = models.BooleanField(default=0)\n activity_level = models.CharField(null=True, blank=True, max_length=40,\n choices=ACTIVITY_LEVEL_PETS, default='')\n is_basic_trainned = models.BooleanField(default=0)\n confortable_with_kids = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_elder = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_cats = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_dogs = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_men = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n confortable_with_women = models.CharField(null=True, blank=True,\n max_length=100, choices=CONFORTABLE, default='')\n arrival_date = models.CharField(null=True, blank=True, max_length=30,\n default='')\n where_was_found_name = models.CharField(null=True, blank=True,\n max_length=100, default='')\n is_neutered = models.BooleanField(default=0)\n was_rabbies_vaccinated_this_year = models.BooleanField(default=0)\n was_v_vaccinated_this_year = models.BooleanField(default=0)\n was_others_vaccinated_this_year = models.BooleanField(default=0)\n profile_picture = models.ImageField(null=True, blank=True)\n picture_1 = models.ImageField(null=True, blank=True)\n picture_2 = models.ImageField(null=True, blank=True)\n picture_3 = models.ImageField(null=True, blank=True)\n video = models.CharField(null=True, blank=True, max_length=150)\n qty_views = models.IntegerField(default=0)\n qty_favorites = models.IntegerField(default=0)\n qty_msg = models.IntegerField(default=0)\n qty_shares = models.IntegerField(default=0)\n ongs_id = models.ForeignKey(Ongs, on_delete=models.CASCADE, default=1)\n status = models.CharField(null=True, blank=True, max_length=50, choices\n =STATUS_OF_PETS, default='')\n coat_size = models.CharField(null=True, blank=True, max_length=50,\n choices=COAT_SIZE_OF_PETS, default='')\n walk_pull = models.BooleanField(default=0)\n walk_pull_hard = models.BooleanField(default=0)\n walk_dogs = models.BooleanField(default=0)\n walk_people = models.BooleanField(default=0)\n walk_fear = models.BooleanField(default=0)\n color_pattern = models.CharField(null=True, blank=True, max_length=30,\n choices=COLOR_PATTERN_OF_PETS, default='')\n size = models.CharField(null=True, blank=True, max_length=50, choices=\n SIZE_OF_PETS, default='')\n qty_preview_adoptions = models.IntegerField(default=0, choices=\n RETURN_OF_PETS)\n qty_adoptions_app = models.IntegerField(default=0)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n teeth_status = models.CharField(null=True, blank=True, max_length=50,\n choices=STATUS_OF_TEETH, default='')\n combo_adoption_id = models.IntegerField(default=0, null=True, blank=True)\n is_available_adoption = models.BooleanField(default=1)\n where_was_found = models.CharField(null=True, blank=True, max_length=50,\n choices=TYPES_STREET, default='')\n where_was_found_city = models.CharField(null=True, blank=True,\n max_length=100, default='')\n where_was_found_state = models.CharField(null=True, blank=True,\n max_length=100, default='')\n first_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n second_special_need = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_NEED, default='')\n third_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n is_mixed_breed = models.BooleanField(default=1)\n is_walking_daily = models.BooleanField(default=0)\n is_acupuncture = models.BooleanField(default=0)\n is_physiotherapy = models.BooleanField(default=0)\n is_vermifuged = models.BooleanField(default=0)\n is_lice_free = models.BooleanField(default=0)\n is_dog_meet_necessary = models.BooleanField(default=0)\n walk_alone_dislike = models.BooleanField(default=0)\n walk_alone = models.BooleanField(default=0)\n walk_leash = models.BooleanField(default=0)\n id_at_ong = models.IntegerField(default=0, null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.name\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def get_years():\n now = int(timezone.now().year) + 1\n past = timezone.now().year - 30\n a = []\n for i in reversed(range(past, now)):\n a.append((i, i))\n a = tuple(a)\n return a\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n\n\nclass Favorites(models.Model):\n user_id = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE)\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n\n\nclass Favorites(models.Model):\n <assignment token>\n <assignment token>\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n\n\nclass Pet_disease_areas(models.Model):\n name = models.CharField(null=True, blank=True, max_length=300)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n\n\nclass Pet_disease_areas(models.Model):\n <assignment token>\n\n def __str__(self):\n return self.name\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n\n\nclass Pet_disease_areas(models.Model):\n <assignment token>\n <function token>\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n<class token>\n\n\nclass Pet_disease(models.Model):\n AREA_OF_PETS = [('Cardiologia', 'Cardiologia'), ('Dermatologia',\n 'Dermatologia'), ('Endocrinologia', 'Endocrinologia'), (\n 'Gastroenterologia e Hepatologia',\n 'Gastroenterologia e Hepatologia'), ('Hematologia e Imunologia',\n 'Hematologia e Imunologia'), ('Infecciosas', 'Infecciosas'), (\n 'Intoxicaes e Envenemanentos', 'Intoxicaes e Envenemanentos'), (\n 'Musculoesquelticas', 'Musculoesquelticas'), (\n 'Nefrologia e Urologia', 'Nefrologia e Urologia'), ('Neonatologia',\n 'Neonatologia'), ('Neurologia', 'Neurologia'), ('Oftalmologia',\n 'Oftalmologia'), ('Oncologia', 'Oncologia'), ('Respiratrias',\n 'Respiratrias'), ('Teriogenologia', 'Teriogenologia'), (\n 'Vacinao e Nutrologia', 'Vacinao e Nutrologia'), ('Outras', 'Outras')]\n name = models.CharField(null=True, blank=True, max_length=150)\n area = models.CharField(null=True, blank=True, max_length=100, choices=\n AREA_OF_PETS, default='')\n area_id = models.ForeignKey(Pet_disease_areas, on_delete=models.CASCADE,\n null=True, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n<class token>\n\n\nclass Pet_disease(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.name\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n<class token>\n\n\nclass Pet_disease(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet_health(models.Model):\n SPECIAL_NEED = [('3 patas funcionais', '3 patas funcionais'), (\n '2 patas funcionais', '2 patas funcionais'), ('1 pata funcional',\n '1 pata funcional'), ('0 patas funcionais', '0 patas funcionais'),\n ('No pode mastigar', 'No pode mastigar'), ('Cegueira parcial',\n 'Cegueira parcial'), ('Cegueira total', 'Cegueira total'), (\n 'Necessidade de remdios para sempre',\n 'Necessidade de remdios para sempre'), (\n 'Necessidade de terapias para sempre', 'Necessidade de terapias'),\n ('Necessidade de terapias e remdios para sempre',\n 'Necessidade de terapias e remdios para sempre'), ('Doena mental',\n 'Doena mental'), ('Epilepsia', 'Epilesia'), ('Rabo amputado',\n 'Rabo amputado'), ('Surdez parcial', 'Surdez parcial'), (\n 'Surdez total', 'Surdez total'), ('No sente cheiro', 'No sente cheiro')\n ]\n STATUS = [('Curado', 'Curado'), ('Em tratamento', 'Em tratamento'), (\n 'Sem verba', 'Sem verba')]\n SPECIAL_TREATMENT = [('Fisioterapia', 'Fisioterapia'), ('Acunpuntura',\n 'Acunpuntura'), ('Caminhada diria', 'Caminhada diria')]\n TYPES = [('Fatal', 'Fatal'), ('Para o resto da vida',\n 'Para o resto da vida'), ('Temporria', 'Temporria')]\n pet_id = models.ForeignKey(Pet, on_delete=models.CASCADE)\n diagnose_date = models.DateField(null=True, blank=True)\n disease_status = models.CharField(null=True, blank=True, max_length=100,\n choices=STATUS, default='')\n disease_type = models.CharField(null=True, blank=True, max_length=100,\n choices=TYPES, default='')\n internal_notes = models.CharField(null=True, blank=True, max_length=300)\n which_special_need = models.CharField(null=True, blank=True, max_length\n =100, choices=SPECIAL_NEED, default='')\n which_special_treatment = models.CharField(null=True, blank=True,\n max_length=100, choices=SPECIAL_TREATMENT, default='')\n disease_name = models.CharField(null=True, blank=True, max_length=200)\n created_at = models.DateField(auto_now_add=True)\n updated_at = models.DateField(auto_now=True)\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet_health(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.disease\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n\n\nclass Pet_health(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<class token>\n<function token>\n<class token>\n<class token>\n<class token>\n<class token>\n"
] | false |
99,045 |
f3dd9bc175f5a86f2f41a8aaac04c68583264859
|
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 20 18:51:32 2021
@author: Kathleen Lucía Torres Mancilla 298944
Francisco Javier Vite Mimila 299043
Linear Solve: Gradiente descendente
Dejar en una función el método del gradiente.
"""
import numpy as np
'''
El objetivo es resolver Ax = b
Entonces para ello, hicimos lo siguiente:
Ax - b --> Discrepancia de un tanto para un x tanteado
Por facilidad, definimos el error cuadrado total como
(Ax - b)^2
Que matricialmente se puede expresar como:
(Ax - b)' (Ax - b) = Error total cuadrado
Derivamos y encontramos que:
dE = 2A'Ax - 2A'b
La idea es minimizar el error total cuadrado, porque
al hacerlo, llegaríamos al único mínimo del paraboloide
que es cuando (Ax - b)^2 = 0 y entonces, esto implicaría
que Ax = b y por ende x sería la solución del sistema de
ecuaciones.
'''
#A = [[1,2,3],[1,2,3],[1,2,4]]
#mi_producto(A, transpuesta(A))
#x_nuevo = x_viejo - k * Gradiente
#Sistema de ecuaciones que se va a resolver:
A_coef = np.array([[2.0, 1.0, -3.0], [5.0, -4.0, 1.0], [1.0, -1.0, -4.0]])
b_coef = np.array([7.0, -19.0, 4.0])
x_sol = np.array([1.0, 1.0, 1.0])
def gradient(x, A, b):
element_1 = np.dot(np.transpose(A),np.dot(A, x))
element_2 = np.dot(np.transpose(A), b)
return element_1 - element_2
def linear_solve(M, v, x_start, umbral, max_iter):
k = 0.002
for i in range(max_iter):
print(x_start)
x_start = x_start - k * gradient(x_start, M, v)
current_v = np.dot(M,x_start)
error_np = np.sum(np.abs(current_v-v))
if error_np < umbral:
return x_start
print(linear_solve(A_coef, b_coef, x_sol, 0.001, 10000))
'''
#def linear_solve(A, b, x_start, umbral = 0.001, max_iter = 1000)
###
#Tasa de aprendizaje.
k = 0.002 #Parámetros de ajuste o hiperparámetros
for i in range(1000):
print(x_sol)
x_sol = x_sol - k * gradient(x_sol, A_coef, b_coef)
print(np.dot(A_coef,x_sol))
'''
|
[
"# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Oct 20 18:51:32 2021\r\n\r\n@author: Kathleen Lucía Torres Mancilla 298944\r\n Francisco Javier Vite Mimila 299043\r\n \r\nLinear Solve: Gradiente descendente\r\n\r\nDejar en una función el método del gradiente.\r\n\r\n\"\"\"\r\nimport numpy as np\r\n'''\r\nEl objetivo es resolver Ax = b\r\nEntonces para ello, hicimos lo siguiente:\r\n\r\nAx - b --> Discrepancia de un tanto para un x tanteado\r\n\r\nPor facilidad, definimos el error cuadrado total como\r\n\r\n(Ax - b)^2 \r\n\r\nQue matricialmente se puede expresar como:\r\n\r\n(Ax - b)' (Ax - b) = Error total cuadrado\r\n\r\nDerivamos y encontramos que:\r\n\r\ndE = 2A'Ax - 2A'b \r\n\r\nLa idea es minimizar el error total cuadrado, porque\r\nal hacerlo, llegaríamos al único mínimo del paraboloide\r\nque es cuando (Ax - b)^2 = 0 y entonces, esto implicaría\r\nque Ax = b y por ende x sería la solución del sistema de \r\necuaciones.\r\n'''\r\n\r\n#A = [[1,2,3],[1,2,3],[1,2,4]]\r\n\r\n#mi_producto(A, transpuesta(A))\r\n#x_nuevo = x_viejo - k * Gradiente\r\n \r\n#Sistema de ecuaciones que se va a resolver:\r\nA_coef = np.array([[2.0, 1.0, -3.0], [5.0, -4.0, 1.0], [1.0, -1.0, -4.0]])\r\nb_coef = np.array([7.0, -19.0, 4.0])\r\n\r\nx_sol = np.array([1.0, 1.0, 1.0])\r\n\r\n\r\ndef gradient(x, A, b):\r\n\telement_1 = np.dot(np.transpose(A),np.dot(A, x))\r\n\telement_2 = np.dot(np.transpose(A), b)\r\n\treturn element_1 - element_2\r\n\r\ndef linear_solve(M, v, x_start, umbral, max_iter):\r\n k = 0.002\r\n for i in range(max_iter):\r\n print(x_start)\r\n x_start = x_start - k * gradient(x_start, M, v)\r\n current_v = np.dot(M,x_start)\r\n error_np = np.sum(np.abs(current_v-v))\r\n if error_np < umbral:\r\n return x_start\r\n\r\nprint(linear_solve(A_coef, b_coef, x_sol, 0.001, 10000))\r\n'''\r\n#def linear_solve(A, b, x_start, umbral = 0.001, max_iter = 1000)\r\n###\r\n\r\n#Tasa de aprendizaje.\r\nk = 0.002 #Parámetros de ajuste o hiperparámetros\r\nfor i in range(1000):\r\n\tprint(x_sol)\r\n\tx_sol = x_sol - k * gradient(x_sol, A_coef, b_coef)\r\n\r\nprint(np.dot(A_coef,x_sol))\r\n'''\r\n",
"<docstring token>\nimport numpy as np\n<docstring token>\nA_coef = np.array([[2.0, 1.0, -3.0], [5.0, -4.0, 1.0], [1.0, -1.0, -4.0]])\nb_coef = np.array([7.0, -19.0, 4.0])\nx_sol = np.array([1.0, 1.0, 1.0])\n\n\ndef gradient(x, A, b):\n element_1 = np.dot(np.transpose(A), np.dot(A, x))\n element_2 = np.dot(np.transpose(A), b)\n return element_1 - element_2\n\n\ndef linear_solve(M, v, x_start, umbral, max_iter):\n k = 0.002\n for i in range(max_iter):\n print(x_start)\n x_start = x_start - k * gradient(x_start, M, v)\n current_v = np.dot(M, x_start)\n error_np = np.sum(np.abs(current_v - v))\n if error_np < umbral:\n return x_start\n\n\nprint(linear_solve(A_coef, b_coef, x_sol, 0.001, 10000))\n<docstring token>\n",
"<docstring token>\n<import token>\n<docstring token>\nA_coef = np.array([[2.0, 1.0, -3.0], [5.0, -4.0, 1.0], [1.0, -1.0, -4.0]])\nb_coef = np.array([7.0, -19.0, 4.0])\nx_sol = np.array([1.0, 1.0, 1.0])\n\n\ndef gradient(x, A, b):\n element_1 = np.dot(np.transpose(A), np.dot(A, x))\n element_2 = np.dot(np.transpose(A), b)\n return element_1 - element_2\n\n\ndef linear_solve(M, v, x_start, umbral, max_iter):\n k = 0.002\n for i in range(max_iter):\n print(x_start)\n x_start = x_start - k * gradient(x_start, M, v)\n current_v = np.dot(M, x_start)\n error_np = np.sum(np.abs(current_v - v))\n if error_np < umbral:\n return x_start\n\n\nprint(linear_solve(A_coef, b_coef, x_sol, 0.001, 10000))\n<docstring token>\n",
"<docstring token>\n<import token>\n<docstring token>\n<assignment token>\n\n\ndef gradient(x, A, b):\n element_1 = np.dot(np.transpose(A), np.dot(A, x))\n element_2 = np.dot(np.transpose(A), b)\n return element_1 - element_2\n\n\ndef linear_solve(M, v, x_start, umbral, max_iter):\n k = 0.002\n for i in range(max_iter):\n print(x_start)\n x_start = x_start - k * gradient(x_start, M, v)\n current_v = np.dot(M, x_start)\n error_np = np.sum(np.abs(current_v - v))\n if error_np < umbral:\n return x_start\n\n\nprint(linear_solve(A_coef, b_coef, x_sol, 0.001, 10000))\n<docstring token>\n",
"<docstring token>\n<import token>\n<docstring token>\n<assignment token>\n\n\ndef gradient(x, A, b):\n element_1 = np.dot(np.transpose(A), np.dot(A, x))\n element_2 = np.dot(np.transpose(A), b)\n return element_1 - element_2\n\n\ndef linear_solve(M, v, x_start, umbral, max_iter):\n k = 0.002\n for i in range(max_iter):\n print(x_start)\n x_start = x_start - k * gradient(x_start, M, v)\n current_v = np.dot(M, x_start)\n error_np = np.sum(np.abs(current_v - v))\n if error_np < umbral:\n return x_start\n\n\n<code token>\n<docstring token>\n",
"<docstring token>\n<import token>\n<docstring token>\n<assignment token>\n<function token>\n\n\ndef linear_solve(M, v, x_start, umbral, max_iter):\n k = 0.002\n for i in range(max_iter):\n print(x_start)\n x_start = x_start - k * gradient(x_start, M, v)\n current_v = np.dot(M, x_start)\n error_np = np.sum(np.abs(current_v - v))\n if error_np < umbral:\n return x_start\n\n\n<code token>\n<docstring token>\n",
"<docstring token>\n<import token>\n<docstring token>\n<assignment token>\n<function token>\n<function token>\n<code token>\n<docstring token>\n"
] | false |
99,046 |
19700cd7719caa1c2f571c40db547065d097036e
|
#!/usr/bin/env python
import argparse
import subprocess
import math
def printHelp():
print('''
Usage:
divides *.xml into n jobs, and renders the b'th block.
For example,
python render.py -b 3 -n 10 teapot_0000.xml, teapot_0001.xml, ... teapot_0099.xml
renders out teapot_0020.xml ... teapot_0029.xml
to render out all the frames, just do python render teapot_*.xml
''')
def main():
parser = argparse.ArgumentParser(description='Parallel Render Mitsuba Frames to movie')
parser.add_argument('-i',type=int, help="bth block to render")
parser.add_argument('-b',type=int, help="number of blocks to render")
parser.add_argument('xmlFiles', type=str, nargs='+')
try:
args = parser.parse_args()
except:
printHelp()
i = args.i
b = args.b
xmlFiles = args.xmlFiles
n = len(xmlFiles)
x = int(math.ceil(n/b))
if i is not None and b is not None:
print("i=",i)
print("b=",b)
print("x=",x)
start = (i-1)*x
end = (i)*x if (i != b) else n # any rounding errors are assigned to the last block.
xmlFiles = xmlFiles[start:end]
# j - parallelize when applicable, suppress logs, dont overwrite existing images
cmds = ['mitsuba','-xp','8']
cmds.extend(xmlFiles)
print(cmds)
subprocess.call(cmds)
if __name__ == "__main__":
main()
|
[
"#!/usr/bin/env python\n\nimport argparse\nimport subprocess\nimport math\n\ndef printHelp():\n\tprint('''\nUsage: \ndivides *.xml into n jobs, and renders the b'th block.\nFor example, \n\npython render.py -b 3 -n 10 teapot_0000.xml, teapot_0001.xml, ... teapot_0099.xml\n\nrenders out teapot_0020.xml ... teapot_0029.xml \n\nto render out all the frames, just do python render teapot_*.xml\n''')\n\ndef main():\n\tparser = argparse.ArgumentParser(description='Parallel Render Mitsuba Frames to movie')\n\tparser.add_argument('-i',type=int, help=\"bth block to render\")\n\tparser.add_argument('-b',type=int, help=\"number of blocks to render\")\n\tparser.add_argument('xmlFiles', type=str, nargs='+')\n\t\n\ttry:\n\t\targs = parser.parse_args()\n\texcept:\n\t\tprintHelp()\n\n\ti = args.i\n\tb = args.b\n\txmlFiles = args.xmlFiles\n\tn = len(xmlFiles)\n\tx = int(math.ceil(n/b))\n\tif i is not None and b is not None:\n\t\tprint(\"i=\",i)\n\t\tprint(\"b=\",b)\n\t\tprint(\"x=\",x)\n\t\tstart = (i-1)*x\n\t\tend = (i)*x if (i != b) else n # any rounding errors are assigned to the last block.\n\t\txmlFiles = xmlFiles[start:end]\n\t\t\t\n\t# j - parallelize when applicable, suppress logs, dont overwrite existing images\n\tcmds = ['mitsuba','-xp','8']\n\tcmds.extend(xmlFiles)\n\tprint(cmds)\n\tsubprocess.call(cmds)\n\t\nif __name__ == \"__main__\":\n\tmain()",
"import argparse\nimport subprocess\nimport math\n\n\ndef printHelp():\n print(\n \"\"\"\nUsage: \ndivides *.xml into n jobs, and renders the b'th block.\nFor example, \n\npython render.py -b 3 -n 10 teapot_0000.xml, teapot_0001.xml, ... teapot_0099.xml\n\nrenders out teapot_0020.xml ... teapot_0029.xml \n\nto render out all the frames, just do python render teapot_*.xml\n\"\"\"\n )\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Parallel Render Mitsuba Frames to movie')\n parser.add_argument('-i', type=int, help='bth block to render')\n parser.add_argument('-b', type=int, help='number of blocks to render')\n parser.add_argument('xmlFiles', type=str, nargs='+')\n try:\n args = parser.parse_args()\n except:\n printHelp()\n i = args.i\n b = args.b\n xmlFiles = args.xmlFiles\n n = len(xmlFiles)\n x = int(math.ceil(n / b))\n if i is not None and b is not None:\n print('i=', i)\n print('b=', b)\n print('x=', x)\n start = (i - 1) * x\n end = i * x if i != b else n\n xmlFiles = xmlFiles[start:end]\n cmds = ['mitsuba', '-xp', '8']\n cmds.extend(xmlFiles)\n print(cmds)\n subprocess.call(cmds)\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\n\n\ndef printHelp():\n print(\n \"\"\"\nUsage: \ndivides *.xml into n jobs, and renders the b'th block.\nFor example, \n\npython render.py -b 3 -n 10 teapot_0000.xml, teapot_0001.xml, ... teapot_0099.xml\n\nrenders out teapot_0020.xml ... teapot_0029.xml \n\nto render out all the frames, just do python render teapot_*.xml\n\"\"\"\n )\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Parallel Render Mitsuba Frames to movie')\n parser.add_argument('-i', type=int, help='bth block to render')\n parser.add_argument('-b', type=int, help='number of blocks to render')\n parser.add_argument('xmlFiles', type=str, nargs='+')\n try:\n args = parser.parse_args()\n except:\n printHelp()\n i = args.i\n b = args.b\n xmlFiles = args.xmlFiles\n n = len(xmlFiles)\n x = int(math.ceil(n / b))\n if i is not None and b is not None:\n print('i=', i)\n print('b=', b)\n print('x=', x)\n start = (i - 1) * x\n end = i * x if i != b else n\n xmlFiles = xmlFiles[start:end]\n cmds = ['mitsuba', '-xp', '8']\n cmds.extend(xmlFiles)\n print(cmds)\n subprocess.call(cmds)\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\n\n\ndef printHelp():\n print(\n \"\"\"\nUsage: \ndivides *.xml into n jobs, and renders the b'th block.\nFor example, \n\npython render.py -b 3 -n 10 teapot_0000.xml, teapot_0001.xml, ... teapot_0099.xml\n\nrenders out teapot_0020.xml ... teapot_0029.xml \n\nto render out all the frames, just do python render teapot_*.xml\n\"\"\"\n )\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Parallel Render Mitsuba Frames to movie')\n parser.add_argument('-i', type=int, help='bth block to render')\n parser.add_argument('-b', type=int, help='number of blocks to render')\n parser.add_argument('xmlFiles', type=str, nargs='+')\n try:\n args = parser.parse_args()\n except:\n printHelp()\n i = args.i\n b = args.b\n xmlFiles = args.xmlFiles\n n = len(xmlFiles)\n x = int(math.ceil(n / b))\n if i is not None and b is not None:\n print('i=', i)\n print('b=', b)\n print('x=', x)\n start = (i - 1) * x\n end = i * x if i != b else n\n xmlFiles = xmlFiles[start:end]\n cmds = ['mitsuba', '-xp', '8']\n cmds.extend(xmlFiles)\n print(cmds)\n subprocess.call(cmds)\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Parallel Render Mitsuba Frames to movie')\n parser.add_argument('-i', type=int, help='bth block to render')\n parser.add_argument('-b', type=int, help='number of blocks to render')\n parser.add_argument('xmlFiles', type=str, nargs='+')\n try:\n args = parser.parse_args()\n except:\n printHelp()\n i = args.i\n b = args.b\n xmlFiles = args.xmlFiles\n n = len(xmlFiles)\n x = int(math.ceil(n / b))\n if i is not None and b is not None:\n print('i=', i)\n print('b=', b)\n print('x=', x)\n start = (i - 1) * x\n end = i * x if i != b else n\n xmlFiles = xmlFiles[start:end]\n cmds = ['mitsuba', '-xp', '8']\n cmds.extend(xmlFiles)\n print(cmds)\n subprocess.call(cmds)\n\n\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<code token>\n"
] | false |
99,047 |
893fb5b1b39869dddc5f8303b5635289c8e13b5c
|
from BaseControlPlots import BaseControlPlots
from ROOT import TLorentzVector as TLV
from ROOT import TTree, TBranch
from itertools import combinations # to make jets combinations
from copy import copy
from fold import fold
from math import sqrt, cos, pi
#from reconstruct import max_b2b
from reconstruct import recoNeutrino, recoWlnu2Mt
# variables for in tree
tree_vars = [ "Njets20","Nbjets30",
"jet1Pt","jet2Pt",
"bjet1Pt","bjet2Pt",
"Pt_bb","Pt_bl","Pt_j1l",
"Pt_b1lnu", "Pt_b2lnu",
"Pt_jjl", "Pt_jjb1", "Pt_jjb2",
"leptonPt","MET",
"DeltaR_j1l","DeltaR_j2l",
"DeltaR_b1l","DeltaR_b2l",
"DeltaR_bb1","DeltaR_jj",
"DeltaR_jjl","DeltaR_jjb",
"DeltaPhi_j1lbb",
"DeltaPhi_lMET","DeltaPhi_jjlnu",
"M_bb_closest", "M_jjlnu",
"M_jjb1", "M_jjb2",
"M_b1lnu", "M_b2lnu",
"M_bl", "M_jjl",
"M_jj", "M_j1l",
"MT_lnu","MT_jjlnu" ]
# Requirements:
# event.muons
# event.electrons
class CleanUpControlPlots(BaseControlPlots):
"""A class to create control plots for leptons"""
def __init__(self, dir=None, dataset=None, mode="plots"):
# create output file if needed. If no file is given, it means it is delegated
BaseControlPlots.__init__(self, dir=dir, purpose="cleanup", dataset=dataset, mode=mode)
def beginJob(self):
# declare tree and branches
self.addTree("cleanup","Variables for MVA")
for var in tree_vars:
self.addBranch("cleanup",var)
self.add("Njets20","jets multiplicity (Pt > 20 GeV)",15,0,15)
self.add("Njets30","jets multiplicity (Pt > 30 GeV)",15,0,15)
self.add("Nbjets30","bjets multiplicity (Pt > 30 GeV)",5,0,5)
self.add("Nbjets30_cut_PUPPI","bjets multiplicity (Pt > 30 GeV)",5,0,5)
self.add("Nbjets30_cut_all","bjets multiplicity (Pt > 30 GeV)",5,0,5)
self.add("jet1Pt","leading jet Pt",100,0,250)
self.add("jet2Pt","second leading jet Pt",100,0,250)
self.add("bjet1Pt","leading b-jet Pt",100,0,250)
self.add("bjet2Pt","second leading b-jet Pt",100,0,250)
self.add("Pt_bb","closest bjets pair Pt",100,0,500)
self.add("Pt_bl","closest bjet-lepton Pt",100,0,500)
self.add("Pt_b1lnu","second closest bjet-lepton-neutrino Pt",100,0,500)
self.add("Pt_b2lnu","closest bjet-lepton-neutrino Pt",100,0,500)
self.add("Pt_j1l","closest jet-lepton Pt",100,0,500)
self.add("Pt_jjl","leading jets-lepton Pt",100,0,500)
self.add("Pt_jjb1","leading jets-bjet Pt",100,0,500)
self.add("Pt_jjb2","leading jets-bjet Pt",100,0,500)
self.add("Eta_bb","closest bjet pair Eta",100,0,500)
self.add("leptonPt","lepton Pt",100,0,250)
self.add("MET","MET",100,0,300)
self.add("M_jj","leading jet-jet Mass",100,0,300)
self.add("M_jjb1","hadronic top reco Mass",100,0,700)
self.add("M_jjb2","hadronic top reco Mass",100,0,700)
self.add2D("M_jjb_2D","M_jjb1 vs. M_jjb2",100,0,700,100,0,700)
self.add2D("M_jj_NPU","NPU vs. M_jj",80,0,300,80,80,200)
self.add("M_jjl","leading jets-lepton Mass",100,0,450)
self.add("M_jjlnu","leading jets-lepton-MET Mass",100,0,800)
self.add("M_j1l","closest jet-lepton Mass",100,0,450)
self.add("M_bb_leading","leading bjet-bjet Mass",100,0,300)
self.add("M_bb_closest","closest bjet-bjet Mass",100,0,300)
self.add("M_bb_farthest","farthest bjet-bjet Mass",100,0,300)
self.add("M_bl","closest bjet-lepton Mass",100,0,300)
self.add("MT_lnu","Wlnu Mt",100,0,200)
self.add("MT_jjlnu","HWW Mt",100,0,300)
self.add("M_b1lnu","leptonic top reco Mass",100,0,500)
self.add("M_b2lnu","leptonic top reco Mass",100,0,500)
self.add2D("M_blnu_2D","M_b1lnu vs. M_b2lnu",100,0,500,100,0,500)
self.add("DeltaR_jj","leading jet-jet DeltaR",100,0,4.5)
self.add("DeltaR_j1l","closest jet-lepton DeltaR",100,0,4)
self.add("DeltaR_j2l","2nd closest jet-lepton DeltaR",100,0,4)
self.add("DeltaR_jjl","leading jets-lepton DeltaR",100,0,4.5)
self.add("DeltaR_jjb","leading jets-bjet DeltaR",100,0,4.5)
self.add("DeltaR_j1lbb","closest jet-lepton-bjets DeltaR",100,0,4.5)
self.add("DeltaR_jjlbb","leading jets-lepton-bjets DeltaR",100,0,4.5)
self.add("DeltaR_jjbbl","leading jets-bjet-bjet-lepton DeltaR",100,0,4.5)
self.add("DeltaR_bb1","closest bjet-bjet pair DeltaR",100,0,4)
self.add("DeltaR_b1l","farthest bjet-lepton DeltaR",100,0,4)
self.add("DeltaR_b2l","2nd farthest bjet-lepton DeltaR",100,0,4)
self.add("DeltaPhi_jj","leading jet-jet DeltaPhi",100,0,3.5)
self.add("DeltaPhi_j1l","closest jet-lepton DeltaPhi",100,0,3.5)
self.add("DeltaPhi_j2l","2nd closest jet-lepton DeltaPhi",100,0,3.5)
self.add("DeltaPhi_jjl","leading jets-lepton DeltaPhi",100,0,3.5)
self.add("DeltaPhi_jjb","leading jets-bjet DeltaPhi",100,0,3.5)
self.add("DeltaPhi_j1lbb","closest jet-lepton-bjets DeltaPhi",100,0,3.5)
self.add("DeltaPhi_jjlbb","leading jets-lepton-bjets DeltaPhi",100,0,3.5)
self.add("DeltaPhi_jjbbl","leading jets-bjet-bjet-lepton DeltaPhi",100,0,3.5)
self.add("DeltaPhi_bb1","closest bjet-bjet pair DeltaPhi",100,0,3.5)
self.add("DeltaPhi_b1l","farthest bjet-lepton DeltaPhi",100,0,3.5)
self.add("DeltaPhi_b2l","2nd farthest bjet-lepton DeltaPhi",100,0,3.5)
self.add("DeltaPhi_lMET","lepton-MET DeltaPhi",100,0,3.5)
self.add("DeltaPhi_jjlnu","jets-lepton-MET DeltaPhi",100,0,3.5)
self.add2D("DeltaEtaDeltaPhi_jj","leading jet-jet DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_j1l","closest jet-lepton combination DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_j2l","2nd closest jet-lepton combination DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_j3l","3rd closest jet-lepton combination DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_jjl","leading jets-lepton DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_jjb","leading jets-bjet DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_j1lbb","closest jet-lepton-bjets DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_jjlbb","leading jets-lepton-bjets DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_jjbbl","leading jets-bjet-bjet-lepton DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_bb1","closest bjet-bjet DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_b1l","farthest bjet-lepton DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
self.add2D("DeltaEtaDeltaPhi_b2l","2nd farthest bjet-lepton DeltaPhi vs. DeltaEta",50,0,3.5,50,0,3.2)
# self.add2D("NVerticesNJets","all jet multiplicity vs. number vertices",20,100,190,15,0,15)
# self.add2D("NVerticesNPUPPIJets","PUPPI jet multiplicity vs. number vertices",20,100,190,15,0,15)
# get information
def process(self, event):
result = { }
jets = event.cleanedJets20[:] # remove closest b-jets pair down below
alljets = [ j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5 ]
bjets = event.bjets30[:]
result["Njets20"] = len(event.cleanedJets20)
result["Njets30"] = len(event.cleanedJets30)
result["Nbjets30"] = len(event.bjets30)
if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0].MET > 20:
result["Nbjets30_cut_PUPPI"] = len(event.bjets30)
result["Nbjets30_cut_all"] = len([ j for j in alljets if j.BTag and j.PT > 30 ])
NPU = event.npu[0]
# result["NVerticesNJets"] = [[ NPU.HT, len(alljets) ]]
# result["NVerticesNPUPPIJets"] = [[ NPU.HT, len(jets) ]]
lepton = None
p_neutrino = None
MET = event.met[0]
if len(event.leadingLeptons):
lepton = event.leadingLeptons[0]
p_neutrino = recoNeutrino(lepton.TLV,MET)
# bjet - bjet
bl = [ ]
p_bl = None
if lepton and bjets:
bl = sorted( bjets, key=lambda j: TLV.DeltaR(j.TLV,lepton.TLV), reverse=True ) # farthest->closest
DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))
DeltaEta = abs(lepton.Eta - bl[0].Eta)
p_bl = lepton.TLV+bl[-1].TLV
result["M_bl"] = p_bl.M() # closest b-jet with lepton
result["Pt_bl"] = p_bl.Pt()
result["DeltaR_b1l"] = TLV.DeltaR(lepton.TLV,bl[0].TLV)
result["DeltaPhi_b1l"] = DeltaPhi
result["DeltaEtaDeltaPhi_b1l"] = [[ DeltaEta, DeltaPhi ]]
if len(bl)>1:
DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))
DeltaEta = abs(lepton.Eta - bl[1].Eta)
result["M_bb_farthest"] = (bl[0].TLV+bl[1].TLV).M()
result["DeltaR_b2l"] = TLV.DeltaR(lepton.TLV,bl[1].TLV)
result["DeltaPhi_b2l"] = DeltaPhi
result["DeltaEtaDeltaPhi_b2l"] = [[ DeltaEta, DeltaPhi ]]
# bjet comb
DeltaR_bb_closest = 1000 # >> pi
bjet_closest = [ ]
p_bb1 = None
for j1, j2 in combinations(bjets,2):
p_bb = j1.TLV + j2.TLV
DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)
if DeltaR < DeltaR_bb_closest:
bjet_closest = [j1,j2]
p_bb1 = p_bb
result["M_bb_closest"] = p_bb.M()
result["Pt_bb"] = p_bb.Pt()
result["DeltaR_bb1"] = TLV.DeltaR(j1.TLV,j2.TLV)
result["DeltaPhi_bb1"] = fold(abs(j1.Phi - j2.Phi))
result["DeltaEtaDeltaPhi_bb1"] = [[ abs(j1.Eta - j2.Eta),
result["DeltaPhi_bb1"] ]]
DeltaR_bb_closest = DeltaR
if len(bjets)>1:
result["M_bb_leading"] = (bjets[0].TLV+bjets[1].TLV).M()
# leading non-b-jets
for bjet in bjet_closest: # remove closest bjet pair from jet list
jets.remove(bjet)
if len(jets)>0:
result["jet1Pt"] = jets[0].PT
if len(jets)>1:
result["jet2Pt"] = jets[1].PT
# leading bjets
if len(bjets)>1:
result["bjet1Pt"] = bjet_closest[0].PT
result["bjet2Pt"] = bjet_closest[1].PT
elif len(bjets):
result["bjet1Pt"] = bjets[0].PT
# jet comb
if len(jets)>1:
# 120 GeV upper mass limit
# jets120 = [ js for js in combinations(jets[:4],2) if (js[0].TLV+js[1].TLV).M() < 120 ]
# if len(jets120):
# jets = max( jets120, key = lambda js: (js[0].TLV+js[1].TLV).Pt())
p_jj = jets[0].TLV + jets[1].TLV
result["M_jj"] = p_jj.M()
result["DeltaR_jj"] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)
result["DeltaPhi_jj"] = fold(abs(jets[0].Phi - jets[1].Phi))
result["DeltaEtaDeltaPhi_jj"] = [[ abs(jets[0].Eta - jets[1].Eta),
result["DeltaPhi_jj"] ]]
result["M_jj_NPU"] = [[ p_jj.M(), NPU.HT ]]
# jjl
if lepton:
p_jjl = p_jj + lepton.TLV
result["M_jjl"] = p_jjl.M()
result["Pt_jjl"] = p_jjl.Pt()
result["M_jjlnu"] = (p_jj + lepton.TLV + p_neutrino).M()
result["DeltaR_jjl"] = TLV.DeltaR(p_jj,lepton.TLV)
result["DeltaPhi_jjl"] = fold(abs(p_jj.Phi()-lepton.Phi))
result["DeltaEtaDeltaPhi_jjl"] = [[ abs(p_jj.Eta() - lepton.Eta),
result["DeltaPhi_jjl"] ]]
result["DeltaPhi_jjlnu"] = fold(abs(p_jjl.Phi()-MET.Phi))
result["MT_jjlnu"] = sqrt(2 * MET.MET * p_jjl.Pt() * (1-cos( p_jjl.Phi() - MET.Phi)) )
if len(bl)>1:
p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino
p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino
result["M_b1lnu"] = p_blnu.M()
result["M_b2lnu"] = p_b2lnu.M() # take bjet closest
result["M_blnu_2D"] = [[ result["M_b1lnu"], result["M_b2lnu"] ]]
result["Pt_b1lnu"] = p_blnu.Pt()
result["Pt_b2lnu"] = p_b2lnu.Pt()
if len(event.cleanedJets20)>3: # take bjet second closest to lepton
jets_tt = event.cleanedJets20[:]
jets_tt.remove(bl[-1])
jets_tt.remove(bl[-2])
# 120 GeV upper mass limit
# jets120 = [ js for js in combinations(jets_tt[:4],2) if (js[0].TLV+js[1].TLV).M() < 120 ]
# if len(jets120):
# jets_tt = max( jets120, key = lambda js: (js[0].TLV+js[1].TLV).Pt())
p_jj = jets_tt[0].TLV + jets_tt[1].TLV
p_jjb = p_jj + bl[-2].TLV
p_jjb2 = p_jj + bl[-1].TLV
result["M_jjl"] = p_jjl.M()
result["M_jjb1"] = p_jjb.M()
result["M_jjb2"] = p_jjb2.M()
result["M_jjb_2D"] = [[ result["M_jjb1"], result["M_jjb2"] ]]
result["Pt_jjb1"] = p_jjb.Pt()
result["Pt_jjb2"] = p_jjb2.Pt()
result["DeltaR_jjb"] = TLV.DeltaR(p_jj,bl[-2].TLV)
result["DeltaPhi_jjb"] = fold(abs(p_jj.Phi()-bl[-2].Phi))
result["DeltaEtaDeltaPhi_jjb"] = [[ abs(p_jj.Eta() - bl[-2].Eta),
result["DeltaPhi_jjb"] ]]
result["DeltaR_jjlbb"] = TLV.DeltaR(p_jjl,p_bb1)
result["DeltaPhi_jjlbb"] = fold(abs(p_jjl.Phi()-p_bb1.Phi()))
result["DeltaEtaDeltaPhi_jjlbb"] = [[ abs(p_jjl.Eta() - p_bb1.Eta()),
result["DeltaPhi_jjlbb"] ]]
result["DeltaR_jjbbl"] = TLV.DeltaR(p_jjb,p_bl)
result["DeltaPhi_jjbbl"] = fold(abs(p_jjb.Phi()-p_bl.Phi()))
result["DeltaEtaDeltaPhi_jjbbl"] = [[ abs(p_jjb.Eta() - p_bl.Eta()),
result["DeltaPhi_jjbbl"] ]]
if lepton:
# MET - lepton
result["leptonPt"] = lepton.PT
result["MET"] = MET.MET
result["DeltaPhi_lMET"] = abs(MET.Phi-lepton.Phi)
result["MT_lnu"] = recoWlnu2Mt(lepton,MET)
# jet i - lepton
ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV,lepton.TLV))[:3] # closest jets
if len(ji)>0 and p_bb1:
p_j1l = lepton.TLV+ji[0].TLV
result["M_j1l"] = p_j1l.M()
result["Pt_j1l"] = p_j1l.Pt()
result["DeltaR_j1l"] = TLV.DeltaR(lepton.TLV,ji[0].TLV)
result["DeltaPhi_j1l"] = fold(abs(lepton.Phi - ji[0].Phi))
result["DeltaEtaDeltaPhi_j1l"] = [[ abs(lepton.Eta - ji[0].Eta),
result["DeltaPhi_j1l"] ]]
result["DeltaR_j1lbb"] = TLV.DeltaR(p_j1l,p_bb1)
result["DeltaPhi_j1lbb"] = fold(abs(p_j1l.Phi()-p_bb1.Phi()))
result["DeltaEtaDeltaPhi_j1lbb"] = [[ abs(p_j1l.Eta() - p_bb1.Eta()),
result["DeltaPhi_j1lbb"] ]]
if len(ji)>1:
# result["M_j2l"] = (lepton.TLV+ji[1].TLV).M()
result["DeltaR_j2l"] = TLV.DeltaR(lepton.TLV,ji[1].TLV)
result["DeltaPhi_j2l"] = fold(abs(lepton.Phi - ji[1].Phi))
result["DeltaEtaDeltaPhi_j2l"] = [[ abs(lepton.Eta - ji[1].Eta),
result["DeltaPhi_j2l"] ]]
if len(ji)>2:
result["DeltaEtaDeltaPhi_j3l"] = [[ abs(lepton.Eta - ji[2].Eta),
fold(abs(lepton.Phi - ji[2].Phi)) ]]
# respect the order of branches when adding variables
# result["cleanup"] = [ result[var] for var in result if var in tree_vars ]
result["cleanup"] = [ ]
for var in tree_vars:
if var in result:
result["cleanup"].append(result[var])
else: # if one variable does not exist for this event, no tree
del result["cleanup"]
break
return result
if __name__=="__main__":
import sys
from DelphesAnalysis.BaseControlPlots import runTest
runTest(sys.argv[1], CleanUpControlPlots())
|
[
"from BaseControlPlots import BaseControlPlots\nfrom ROOT import TLorentzVector as TLV\nfrom ROOT import TTree, TBranch\nfrom itertools import combinations # to make jets combinations\nfrom copy import copy\nfrom fold import fold\nfrom math import sqrt, cos, pi\n#from reconstruct import max_b2b\nfrom reconstruct import recoNeutrino, recoWlnu2Mt\n\n# variables for in tree\ntree_vars = [ \"Njets20\",\"Nbjets30\",\n \"jet1Pt\",\"jet2Pt\",\n \"bjet1Pt\",\"bjet2Pt\",\n \"Pt_bb\",\"Pt_bl\",\"Pt_j1l\",\n \"Pt_b1lnu\", \"Pt_b2lnu\",\n \"Pt_jjl\", \"Pt_jjb1\", \"Pt_jjb2\",\n \"leptonPt\",\"MET\",\n \"DeltaR_j1l\",\"DeltaR_j2l\",\n \"DeltaR_b1l\",\"DeltaR_b2l\",\n \"DeltaR_bb1\",\"DeltaR_jj\",\n \"DeltaR_jjl\",\"DeltaR_jjb\",\n \"DeltaPhi_j1lbb\",\n \"DeltaPhi_lMET\",\"DeltaPhi_jjlnu\",\n \"M_bb_closest\", \"M_jjlnu\",\n \"M_jjb1\", \"M_jjb2\",\n \"M_b1lnu\", \"M_b2lnu\",\n \"M_bl\", \"M_jjl\",\n \"M_jj\", \"M_j1l\",\n \"MT_lnu\",\"MT_jjlnu\" ]\n\n# Requirements:\n# event.muons\n# event.electrons\n\nclass CleanUpControlPlots(BaseControlPlots):\n \"\"\"A class to create control plots for leptons\"\"\"\n\n def __init__(self, dir=None, dataset=None, mode=\"plots\"):\n # create output file if needed. If no file is given, it means it is delegated\n BaseControlPlots.__init__(self, dir=dir, purpose=\"cleanup\", dataset=dataset, mode=mode)\n\n def beginJob(self):\n \n # declare tree and branches\n self.addTree(\"cleanup\",\"Variables for MVA\")\n for var in tree_vars:\n self.addBranch(\"cleanup\",var)\n\n self.add(\"Njets20\",\"jets multiplicity (Pt > 20 GeV)\",15,0,15)\n self.add(\"Njets30\",\"jets multiplicity (Pt > 30 GeV)\",15,0,15)\n self.add(\"Nbjets30\",\"bjets multiplicity (Pt > 30 GeV)\",5,0,5)\n self.add(\"Nbjets30_cut_PUPPI\",\"bjets multiplicity (Pt > 30 GeV)\",5,0,5)\n self.add(\"Nbjets30_cut_all\",\"bjets multiplicity (Pt > 30 GeV)\",5,0,5)\n \n self.add(\"jet1Pt\",\"leading jet Pt\",100,0,250)\n self.add(\"jet2Pt\",\"second leading jet Pt\",100,0,250)\n self.add(\"bjet1Pt\",\"leading b-jet Pt\",100,0,250)\n self.add(\"bjet2Pt\",\"second leading b-jet Pt\",100,0,250)\n self.add(\"Pt_bb\",\"closest bjets pair Pt\",100,0,500)\n self.add(\"Pt_bl\",\"closest bjet-lepton Pt\",100,0,500)\n self.add(\"Pt_b1lnu\",\"second closest bjet-lepton-neutrino Pt\",100,0,500)\n self.add(\"Pt_b2lnu\",\"closest bjet-lepton-neutrino Pt\",100,0,500)\n self.add(\"Pt_j1l\",\"closest jet-lepton Pt\",100,0,500)\n self.add(\"Pt_jjl\",\"leading jets-lepton Pt\",100,0,500)\n self.add(\"Pt_jjb1\",\"leading jets-bjet Pt\",100,0,500)\n self.add(\"Pt_jjb2\",\"leading jets-bjet Pt\",100,0,500)\n self.add(\"Eta_bb\",\"closest bjet pair Eta\",100,0,500)\n self.add(\"leptonPt\",\"lepton Pt\",100,0,250)\n self.add(\"MET\",\"MET\",100,0,300)\n\n self.add(\"M_jj\",\"leading jet-jet Mass\",100,0,300)\n self.add(\"M_jjb1\",\"hadronic top reco Mass\",100,0,700)\n self.add(\"M_jjb2\",\"hadronic top reco Mass\",100,0,700)\n self.add2D(\"M_jjb_2D\",\"M_jjb1 vs. M_jjb2\",100,0,700,100,0,700)\n self.add2D(\"M_jj_NPU\",\"NPU vs. M_jj\",80,0,300,80,80,200)\n \n\n self.add(\"M_jjl\",\"leading jets-lepton Mass\",100,0,450)\n self.add(\"M_jjlnu\",\"leading jets-lepton-MET Mass\",100,0,800)\n self.add(\"M_j1l\",\"closest jet-lepton Mass\",100,0,450)\n self.add(\"M_bb_leading\",\"leading bjet-bjet Mass\",100,0,300)\n self.add(\"M_bb_closest\",\"closest bjet-bjet Mass\",100,0,300)\n self.add(\"M_bb_farthest\",\"farthest bjet-bjet Mass\",100,0,300)\n self.add(\"M_bl\",\"closest bjet-lepton Mass\",100,0,300)\n self.add(\"MT_lnu\",\"Wlnu Mt\",100,0,200)\n self.add(\"MT_jjlnu\",\"HWW Mt\",100,0,300)\n self.add(\"M_b1lnu\",\"leptonic top reco Mass\",100,0,500)\n self.add(\"M_b2lnu\",\"leptonic top reco Mass\",100,0,500)\n self.add2D(\"M_blnu_2D\",\"M_b1lnu vs. M_b2lnu\",100,0,500,100,0,500)\n \n self.add(\"DeltaR_jj\",\"leading jet-jet DeltaR\",100,0,4.5)\n self.add(\"DeltaR_j1l\",\"closest jet-lepton DeltaR\",100,0,4)\n self.add(\"DeltaR_j2l\",\"2nd closest jet-lepton DeltaR\",100,0,4)\n self.add(\"DeltaR_jjl\",\"leading jets-lepton DeltaR\",100,0,4.5)\n self.add(\"DeltaR_jjb\",\"leading jets-bjet DeltaR\",100,0,4.5)\n self.add(\"DeltaR_j1lbb\",\"closest jet-lepton-bjets DeltaR\",100,0,4.5)\n self.add(\"DeltaR_jjlbb\",\"leading jets-lepton-bjets DeltaR\",100,0,4.5)\n self.add(\"DeltaR_jjbbl\",\"leading jets-bjet-bjet-lepton DeltaR\",100,0,4.5)\n self.add(\"DeltaR_bb1\",\"closest bjet-bjet pair DeltaR\",100,0,4)\n self.add(\"DeltaR_b1l\",\"farthest bjet-lepton DeltaR\",100,0,4)\n self.add(\"DeltaR_b2l\",\"2nd farthest bjet-lepton DeltaR\",100,0,4)\n\n self.add(\"DeltaPhi_jj\",\"leading jet-jet DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_j1l\",\"closest jet-lepton DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_j2l\",\"2nd closest jet-lepton DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_jjl\",\"leading jets-lepton DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_jjb\",\"leading jets-bjet DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_j1lbb\",\"closest jet-lepton-bjets DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_jjlbb\",\"leading jets-lepton-bjets DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_jjbbl\",\"leading jets-bjet-bjet-lepton DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_bb1\",\"closest bjet-bjet pair DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_b1l\",\"farthest bjet-lepton DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_b2l\",\"2nd farthest bjet-lepton DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_lMET\",\"lepton-MET DeltaPhi\",100,0,3.5)\n self.add(\"DeltaPhi_jjlnu\",\"jets-lepton-MET DeltaPhi\",100,0,3.5)\n\n self.add2D(\"DeltaEtaDeltaPhi_jj\",\"leading jet-jet DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_j1l\",\"closest jet-lepton combination DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_j2l\",\"2nd closest jet-lepton combination DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_j3l\",\"3rd closest jet-lepton combination DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_jjl\",\"leading jets-lepton DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_jjb\",\"leading jets-bjet DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_j1lbb\",\"closest jet-lepton-bjets DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_jjlbb\",\"leading jets-lepton-bjets DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_jjbbl\",\"leading jets-bjet-bjet-lepton DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_bb1\",\"closest bjet-bjet DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_b1l\",\"farthest bjet-lepton DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n self.add2D(\"DeltaEtaDeltaPhi_b2l\",\"2nd farthest bjet-lepton DeltaPhi vs. DeltaEta\",50,0,3.5,50,0,3.2)\n\n# self.add2D(\"NVerticesNJets\",\"all jet multiplicity vs. number vertices\",20,100,190,15,0,15)\n# self.add2D(\"NVerticesNPUPPIJets\",\"PUPPI jet multiplicity vs. number vertices\",20,100,190,15,0,15)\n\n\n\n # get information\n def process(self, event):\n \n result = { }\n \n jets = event.cleanedJets20[:] # remove closest b-jets pair down below\n alljets = [ j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5 ]\n bjets = event.bjets30[:]\n result[\"Njets20\"] = len(event.cleanedJets20)\n result[\"Njets30\"] = len(event.cleanedJets30)\n result[\"Nbjets30\"] = len(event.bjets30)\n \n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0].MET > 20: \n result[\"Nbjets30_cut_PUPPI\"] = len(event.bjets30) \n result[\"Nbjets30_cut_all\"] = len([ j for j in alljets if j.BTag and j.PT > 30 ])\n \n NPU = event.npu[0]\n# result[\"NVerticesNJets\"] = [[ NPU.HT, len(alljets) ]]\n# result[\"NVerticesNPUPPIJets\"] = [[ NPU.HT, len(jets) ]]\n\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV,MET)\n\n # bjet - bjet\n bl = [ ]\n p_bl = None\n if lepton and bjets:\n bl = sorted( bjets, key=lambda j: TLV.DeltaR(j.TLV,lepton.TLV), reverse=True ) # farthest->closest\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV+bl[-1].TLV\n result[\"M_bl\"] = p_bl.M() # closest b-jet with lepton\n result[\"Pt_bl\"] = p_bl.Pt()\n result[\"DeltaR_b1l\"] = TLV.DeltaR(lepton.TLV,bl[0].TLV)\n result[\"DeltaPhi_b1l\"] = DeltaPhi\n result[\"DeltaEtaDeltaPhi_b1l\"] = [[ DeltaEta, DeltaPhi ]]\n if len(bl)>1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result[\"M_bb_farthest\"] = (bl[0].TLV+bl[1].TLV).M()\n result[\"DeltaR_b2l\"] = TLV.DeltaR(lepton.TLV,bl[1].TLV)\n result[\"DeltaPhi_b2l\"] = DeltaPhi\n result[\"DeltaEtaDeltaPhi_b2l\"] = [[ DeltaEta, DeltaPhi ]]\n \n # bjet comb\n DeltaR_bb_closest = 1000 # >> pi\n bjet_closest = [ ]\n p_bb1 = None\n for j1, j2 in combinations(bjets,2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1,j2]\n p_bb1 = p_bb\n result[\"M_bb_closest\"] = p_bb.M()\n result[\"Pt_bb\"] = p_bb.Pt()\n result[\"DeltaR_bb1\"] = TLV.DeltaR(j1.TLV,j2.TLV)\n result[\"DeltaPhi_bb1\"] = fold(abs(j1.Phi - j2.Phi))\n result[\"DeltaEtaDeltaPhi_bb1\"] = [[ abs(j1.Eta - j2.Eta),\n result[\"DeltaPhi_bb1\"] ]]\n DeltaR_bb_closest = DeltaR\n \n if len(bjets)>1:\n result[\"M_bb_leading\"] = (bjets[0].TLV+bjets[1].TLV).M()\n\n # leading non-b-jets\n for bjet in bjet_closest: # remove closest bjet pair from jet list\n jets.remove(bjet)\n if len(jets)>0:\n result[\"jet1Pt\"] = jets[0].PT\n if len(jets)>1:\n result[\"jet2Pt\"] = jets[1].PT\n \n # leading bjets\n if len(bjets)>1:\n result[\"bjet1Pt\"] = bjet_closest[0].PT\n result[\"bjet2Pt\"] = bjet_closest[1].PT\n elif len(bjets):\n result[\"bjet1Pt\"] = bjets[0].PT\n\n\n # jet comb\n if len(jets)>1:\n \n # 120 GeV upper mass limit\n# jets120 = [ js for js in combinations(jets[:4],2) if (js[0].TLV+js[1].TLV).M() < 120 ]\n# if len(jets120):\n# jets = max( jets120, key = lambda js: (js[0].TLV+js[1].TLV).Pt())\n \n p_jj = jets[0].TLV + jets[1].TLV\n result[\"M_jj\"] = p_jj.M()\n result[\"DeltaR_jj\"] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result[\"DeltaPhi_jj\"] = fold(abs(jets[0].Phi - jets[1].Phi))\n result[\"DeltaEtaDeltaPhi_jj\"] = [[ abs(jets[0].Eta - jets[1].Eta),\n result[\"DeltaPhi_jj\"] ]]\n result[\"M_jj_NPU\"] = [[ p_jj.M(), NPU.HT ]]\n \n # jjl\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result[\"M_jjl\"] = p_jjl.M()\n result[\"Pt_jjl\"] = p_jjl.Pt()\n result[\"M_jjlnu\"] = (p_jj + lepton.TLV + p_neutrino).M()\n result[\"DeltaR_jjl\"] = TLV.DeltaR(p_jj,lepton.TLV)\n result[\"DeltaPhi_jjl\"] = fold(abs(p_jj.Phi()-lepton.Phi))\n result[\"DeltaEtaDeltaPhi_jjl\"] = [[ abs(p_jj.Eta() - lepton.Eta),\n result[\"DeltaPhi_jjl\"] ]]\n result[\"DeltaPhi_jjlnu\"] = fold(abs(p_jjl.Phi()-MET.Phi))\n result[\"MT_jjlnu\"] = sqrt(2 * MET.MET * p_jjl.Pt() * (1-cos( p_jjl.Phi() - MET.Phi)) )\n if len(bl)>1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result[\"M_b1lnu\"] = p_blnu.M()\n result[\"M_b2lnu\"] = p_b2lnu.M() # take bjet closest\n result[\"M_blnu_2D\"] = [[ result[\"M_b1lnu\"], result[\"M_b2lnu\"] ]]\n result[\"Pt_b1lnu\"] = p_blnu.Pt()\n result[\"Pt_b2lnu\"] = p_b2lnu.Pt()\n if len(event.cleanedJets20)>3: # take bjet second closest to lepton\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n \n # 120 GeV upper mass limit\n# jets120 = [ js for js in combinations(jets_tt[:4],2) if (js[0].TLV+js[1].TLV).M() < 120 ]\n# if len(jets120):\n# jets_tt = max( jets120, key = lambda js: (js[0].TLV+js[1].TLV).Pt())\n \n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result[\"M_jjl\"] = p_jjl.M()\n result[\"M_jjb1\"] = p_jjb.M()\n result[\"M_jjb2\"] = p_jjb2.M()\n result[\"M_jjb_2D\"] = [[ result[\"M_jjb1\"], result[\"M_jjb2\"] ]]\n result[\"Pt_jjb1\"] = p_jjb.Pt()\n result[\"Pt_jjb2\"] = p_jjb2.Pt()\n result[\"DeltaR_jjb\"] = TLV.DeltaR(p_jj,bl[-2].TLV)\n result[\"DeltaPhi_jjb\"] = fold(abs(p_jj.Phi()-bl[-2].Phi))\n result[\"DeltaEtaDeltaPhi_jjb\"] = [[ abs(p_jj.Eta() - bl[-2].Eta),\n result[\"DeltaPhi_jjb\"] ]]\n result[\"DeltaR_jjlbb\"] = TLV.DeltaR(p_jjl,p_bb1)\n result[\"DeltaPhi_jjlbb\"] = fold(abs(p_jjl.Phi()-p_bb1.Phi()))\n result[\"DeltaEtaDeltaPhi_jjlbb\"] = [[ abs(p_jjl.Eta() - p_bb1.Eta()),\n result[\"DeltaPhi_jjlbb\"] ]]\n result[\"DeltaR_jjbbl\"] = TLV.DeltaR(p_jjb,p_bl)\n result[\"DeltaPhi_jjbbl\"] = fold(abs(p_jjb.Phi()-p_bl.Phi()))\n result[\"DeltaEtaDeltaPhi_jjbbl\"] = [[ abs(p_jjb.Eta() - p_bl.Eta()),\n result[\"DeltaPhi_jjbbl\"] ]]\n \n \n if lepton:\n \n # MET - lepton\n result[\"leptonPt\"] = lepton.PT\n result[\"MET\"] = MET.MET\n result[\"DeltaPhi_lMET\"] = abs(MET.Phi-lepton.Phi)\n result[\"MT_lnu\"] = recoWlnu2Mt(lepton,MET)\n \n # jet i - lepton\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV,lepton.TLV))[:3] # closest jets\n if len(ji)>0 and p_bb1:\n p_j1l = lepton.TLV+ji[0].TLV\n result[\"M_j1l\"] = p_j1l.M()\n result[\"Pt_j1l\"] = p_j1l.Pt()\n result[\"DeltaR_j1l\"] = TLV.DeltaR(lepton.TLV,ji[0].TLV)\n result[\"DeltaPhi_j1l\"] = fold(abs(lepton.Phi - ji[0].Phi))\n result[\"DeltaEtaDeltaPhi_j1l\"] = [[ abs(lepton.Eta - ji[0].Eta),\n result[\"DeltaPhi_j1l\"] ]]\n result[\"DeltaR_j1lbb\"] = TLV.DeltaR(p_j1l,p_bb1)\n result[\"DeltaPhi_j1lbb\"] = fold(abs(p_j1l.Phi()-p_bb1.Phi()))\n result[\"DeltaEtaDeltaPhi_j1lbb\"] = [[ abs(p_j1l.Eta() - p_bb1.Eta()),\n result[\"DeltaPhi_j1lbb\"] ]]\n \n if len(ji)>1:\n# result[\"M_j2l\"] = (lepton.TLV+ji[1].TLV).M()\n result[\"DeltaR_j2l\"] = TLV.DeltaR(lepton.TLV,ji[1].TLV)\n result[\"DeltaPhi_j2l\"] = fold(abs(lepton.Phi - ji[1].Phi))\n result[\"DeltaEtaDeltaPhi_j2l\"] = [[ abs(lepton.Eta - ji[1].Eta),\n result[\"DeltaPhi_j2l\"] ]]\n if len(ji)>2:\n result[\"DeltaEtaDeltaPhi_j3l\"] = [[ abs(lepton.Eta - ji[2].Eta),\n fold(abs(lepton.Phi - ji[2].Phi)) ]]\n \n \n # respect the order of branches when adding variables\n# result[\"cleanup\"] = [ result[var] for var in result if var in tree_vars ]\n result[\"cleanup\"] = [ ]\n for var in tree_vars:\n if var in result:\n result[\"cleanup\"].append(result[var])\n else: # if one variable does not exist for this event, no tree\n del result[\"cleanup\"]\n break\n\n return result\n\n\n\n\nif __name__==\"__main__\":\n import sys\n from DelphesAnalysis.BaseControlPlots import runTest\n runTest(sys.argv[1], CleanUpControlPlots())\n\n",
"from BaseControlPlots import BaseControlPlots\nfrom ROOT import TLorentzVector as TLV\nfrom ROOT import TTree, TBranch\nfrom itertools import combinations\nfrom copy import copy\nfrom fold import fold\nfrom math import sqrt, cos, pi\nfrom reconstruct import recoNeutrino, recoWlnu2Mt\ntree_vars = ['Njets20', 'Nbjets30', 'jet1Pt', 'jet2Pt', 'bjet1Pt',\n 'bjet2Pt', 'Pt_bb', 'Pt_bl', 'Pt_j1l', 'Pt_b1lnu', 'Pt_b2lnu', 'Pt_jjl',\n 'Pt_jjb1', 'Pt_jjb2', 'leptonPt', 'MET', 'DeltaR_j1l', 'DeltaR_j2l',\n 'DeltaR_b1l', 'DeltaR_b2l', 'DeltaR_bb1', 'DeltaR_jj', 'DeltaR_jjl',\n 'DeltaR_jjb', 'DeltaPhi_j1lbb', 'DeltaPhi_lMET', 'DeltaPhi_jjlnu',\n 'M_bb_closest', 'M_jjlnu', 'M_jjb1', 'M_jjb2', 'M_b1lnu', 'M_b2lnu',\n 'M_bl', 'M_jjl', 'M_jj', 'M_j1l', 'MT_lnu', 'MT_jjlnu']\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n \"\"\"A class to create control plots for leptons\"\"\"\n\n def __init__(self, dir=None, dataset=None, mode='plots'):\n BaseControlPlots.__init__(self, dir=dir, purpose='cleanup', dataset\n =dataset, mode=mode)\n\n def beginJob(self):\n self.addTree('cleanup', 'Variables for MVA')\n for var in tree_vars:\n self.addBranch('cleanup', var)\n self.add('Njets20', 'jets multiplicity (Pt > 20 GeV)', 15, 0, 15)\n self.add('Njets30', 'jets multiplicity (Pt > 30 GeV)', 15, 0, 15)\n self.add('Nbjets30', 'bjets multiplicity (Pt > 30 GeV)', 5, 0, 5)\n self.add('Nbjets30_cut_PUPPI', 'bjets multiplicity (Pt > 30 GeV)', \n 5, 0, 5)\n self.add('Nbjets30_cut_all', 'bjets multiplicity (Pt > 30 GeV)', 5,\n 0, 5)\n self.add('jet1Pt', 'leading jet Pt', 100, 0, 250)\n self.add('jet2Pt', 'second leading jet Pt', 100, 0, 250)\n self.add('bjet1Pt', 'leading b-jet Pt', 100, 0, 250)\n self.add('bjet2Pt', 'second leading b-jet Pt', 100, 0, 250)\n self.add('Pt_bb', 'closest bjets pair Pt', 100, 0, 500)\n self.add('Pt_bl', 'closest bjet-lepton Pt', 100, 0, 500)\n self.add('Pt_b1lnu', 'second closest bjet-lepton-neutrino Pt', 100,\n 0, 500)\n self.add('Pt_b2lnu', 'closest bjet-lepton-neutrino Pt', 100, 0, 500)\n self.add('Pt_j1l', 'closest jet-lepton Pt', 100, 0, 500)\n self.add('Pt_jjl', 'leading jets-lepton Pt', 100, 0, 500)\n self.add('Pt_jjb1', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Pt_jjb2', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Eta_bb', 'closest bjet pair Eta', 100, 0, 500)\n self.add('leptonPt', 'lepton Pt', 100, 0, 250)\n self.add('MET', 'MET', 100, 0, 300)\n self.add('M_jj', 'leading jet-jet Mass', 100, 0, 300)\n self.add('M_jjb1', 'hadronic top reco Mass', 100, 0, 700)\n self.add('M_jjb2', 'hadronic top reco Mass', 100, 0, 700)\n self.add2D('M_jjb_2D', 'M_jjb1 vs. M_jjb2', 100, 0, 700, 100, 0, 700)\n self.add2D('M_jj_NPU', 'NPU vs. M_jj', 80, 0, 300, 80, 80, 200)\n self.add('M_jjl', 'leading jets-lepton Mass', 100, 0, 450)\n self.add('M_jjlnu', 'leading jets-lepton-MET Mass', 100, 0, 800)\n self.add('M_j1l', 'closest jet-lepton Mass', 100, 0, 450)\n self.add('M_bb_leading', 'leading bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_closest', 'closest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_farthest', 'farthest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bl', 'closest bjet-lepton Mass', 100, 0, 300)\n self.add('MT_lnu', 'Wlnu Mt', 100, 0, 200)\n self.add('MT_jjlnu', 'HWW Mt', 100, 0, 300)\n self.add('M_b1lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add('M_b2lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add2D('M_blnu_2D', 'M_b1lnu vs. M_b2lnu', 100, 0, 500, 100, 0, 500\n )\n self.add('DeltaR_jj', 'leading jet-jet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1l', 'closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_j2l', '2nd closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_jjl', 'leading jets-lepton DeltaR', 100, 0, 4.5)\n self.add('DeltaR_jjb', 'leading jets-bjet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1lbb', 'closest jet-lepton-bjets DeltaR', 100, 0, 4.5\n )\n self.add('DeltaR_jjlbb', 'leading jets-lepton-bjets DeltaR', 100, 0,\n 4.5)\n self.add('DeltaR_jjbbl', 'leading jets-bjet-bjet-lepton DeltaR', \n 100, 0, 4.5)\n self.add('DeltaR_bb1', 'closest bjet-bjet pair DeltaR', 100, 0, 4)\n self.add('DeltaR_b1l', 'farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_b2l', '2nd farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaPhi_jj', 'leading jet-jet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1l', 'closest jet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j2l', '2nd closest jet-lepton DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_jjl', 'leading jets-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjb', 'leading jets-bjet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1lbb', 'closest jet-lepton-bjets DeltaPhi', 100,\n 0, 3.5)\n self.add('DeltaPhi_jjlbb', 'leading jets-lepton-bjets DeltaPhi', \n 100, 0, 3.5)\n self.add('DeltaPhi_jjbbl', 'leading jets-bjet-bjet-lepton DeltaPhi',\n 100, 0, 3.5)\n self.add('DeltaPhi_bb1', 'closest bjet-bjet pair DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_b1l', 'farthest bjet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_b2l', '2nd farthest bjet-lepton DeltaPhi', 100, \n 0, 3.5)\n self.add('DeltaPhi_lMET', 'lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjlnu', 'jets-lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add2D('DeltaEtaDeltaPhi_jj',\n 'leading jet-jet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1l',\n 'closest jet-lepton combination DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j2l',\n '2nd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j3l',\n '3rd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjl',\n 'leading jets-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2\n )\n self.add2D('DeltaEtaDeltaPhi_jjb',\n 'leading jets-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1lbb',\n 'closest jet-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjlbb',\n 'leading jets-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjbbl',\n 'leading jets-bjet-bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_bb1',\n 'closest bjet-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_b1l',\n 'farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0,\n 3.2)\n self.add2D('DeltaEtaDeltaPhi_b2l',\n '2nd farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n\n def process(self, event):\n result = {}\n jets = event.cleanedJets20[:]\n alljets = [j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5]\n bjets = event.bjets30[:]\n result['Njets20'] = len(event.cleanedJets20)\n result['Njets30'] = len(event.cleanedJets30)\n result['Nbjets30'] = len(event.bjets30)\n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0\n ].MET > 20:\n result['Nbjets30_cut_PUPPI'] = len(event.bjets30)\n result['Nbjets30_cut_all'] = len([j for j in alljets if j.BTag and\n j.PT > 30])\n NPU = event.npu[0]\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV, MET)\n bl = []\n p_bl = None\n if lepton and bjets:\n bl = sorted(bjets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV),\n reverse=True)\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV + bl[-1].TLV\n result['M_bl'] = p_bl.M()\n result['Pt_bl'] = p_bl.Pt()\n result['DeltaR_b1l'] = TLV.DeltaR(lepton.TLV, bl[0].TLV)\n result['DeltaPhi_b1l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b1l'] = [[DeltaEta, DeltaPhi]]\n if len(bl) > 1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result['M_bb_farthest'] = (bl[0].TLV + bl[1].TLV).M()\n result['DeltaR_b2l'] = TLV.DeltaR(lepton.TLV, bl[1].TLV)\n result['DeltaPhi_b2l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b2l'] = [[DeltaEta, DeltaPhi]]\n DeltaR_bb_closest = 1000\n bjet_closest = []\n p_bb1 = None\n for j1, j2 in combinations(bjets, 2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1, j2]\n p_bb1 = p_bb\n result['M_bb_closest'] = p_bb.M()\n result['Pt_bb'] = p_bb.Pt()\n result['DeltaR_bb1'] = TLV.DeltaR(j1.TLV, j2.TLV)\n result['DeltaPhi_bb1'] = fold(abs(j1.Phi - j2.Phi))\n result['DeltaEtaDeltaPhi_bb1'] = [[abs(j1.Eta - j2.Eta),\n result['DeltaPhi_bb1']]]\n DeltaR_bb_closest = DeltaR\n if len(bjets) > 1:\n result['M_bb_leading'] = (bjets[0].TLV + bjets[1].TLV).M()\n for bjet in bjet_closest:\n jets.remove(bjet)\n if len(jets) > 0:\n result['jet1Pt'] = jets[0].PT\n if len(jets) > 1:\n result['jet2Pt'] = jets[1].PT\n if len(bjets) > 1:\n result['bjet1Pt'] = bjet_closest[0].PT\n result['bjet2Pt'] = bjet_closest[1].PT\n elif len(bjets):\n result['bjet1Pt'] = bjets[0].PT\n if len(jets) > 1:\n p_jj = jets[0].TLV + jets[1].TLV\n result['M_jj'] = p_jj.M()\n result['DeltaR_jj'] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result['DeltaPhi_jj'] = fold(abs(jets[0].Phi - jets[1].Phi))\n result['DeltaEtaDeltaPhi_jj'] = [[abs(jets[0].Eta - jets[1].Eta\n ), result['DeltaPhi_jj']]]\n result['M_jj_NPU'] = [[p_jj.M(), NPU.HT]]\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result['M_jjl'] = p_jjl.M()\n result['Pt_jjl'] = p_jjl.Pt()\n result['M_jjlnu'] = (p_jj + lepton.TLV + p_neutrino).M()\n result['DeltaR_jjl'] = TLV.DeltaR(p_jj, lepton.TLV)\n result['DeltaPhi_jjl'] = fold(abs(p_jj.Phi() - lepton.Phi))\n result['DeltaEtaDeltaPhi_jjl'] = [[abs(p_jj.Eta() - lepton.\n Eta), result['DeltaPhi_jjl']]]\n result['DeltaPhi_jjlnu'] = fold(abs(p_jjl.Phi() - MET.Phi))\n result['MT_jjlnu'] = sqrt(2 * MET.MET * p_jjl.Pt() * (1 -\n cos(p_jjl.Phi() - MET.Phi)))\n if len(bl) > 1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result['M_b1lnu'] = p_blnu.M()\n result['M_b2lnu'] = p_b2lnu.M()\n result['M_blnu_2D'] = [[result['M_b1lnu'], result[\n 'M_b2lnu']]]\n result['Pt_b1lnu'] = p_blnu.Pt()\n result['Pt_b2lnu'] = p_b2lnu.Pt()\n if len(event.cleanedJets20) > 3:\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result['M_jjl'] = p_jjl.M()\n result['M_jjb1'] = p_jjb.M()\n result['M_jjb2'] = p_jjb2.M()\n result['M_jjb_2D'] = [[result['M_jjb1'], result[\n 'M_jjb2']]]\n result['Pt_jjb1'] = p_jjb.Pt()\n result['Pt_jjb2'] = p_jjb2.Pt()\n result['DeltaR_jjb'] = TLV.DeltaR(p_jj, bl[-2].TLV)\n result['DeltaPhi_jjb'] = fold(abs(p_jj.Phi() - bl[-\n 2].Phi))\n result['DeltaEtaDeltaPhi_jjb'] = [[abs(p_jj.Eta() -\n bl[-2].Eta), result['DeltaPhi_jjb']]]\n result['DeltaR_jjlbb'] = TLV.DeltaR(p_jjl, p_bb1)\n result['DeltaPhi_jjlbb'] = fold(abs(p_jjl.Phi() -\n p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_jjlbb'] = [[abs(p_jjl.Eta(\n ) - p_bb1.Eta()), result['DeltaPhi_jjlbb']]]\n result['DeltaR_jjbbl'] = TLV.DeltaR(p_jjb, p_bl)\n result['DeltaPhi_jjbbl'] = fold(abs(p_jjb.Phi() -\n p_bl.Phi()))\n result['DeltaEtaDeltaPhi_jjbbl'] = [[abs(p_jjb.Eta(\n ) - p_bl.Eta()), result['DeltaPhi_jjbbl']]]\n if lepton:\n result['leptonPt'] = lepton.PT\n result['MET'] = MET.MET\n result['DeltaPhi_lMET'] = abs(MET.Phi - lepton.Phi)\n result['MT_lnu'] = recoWlnu2Mt(lepton, MET)\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV))[:3]\n if len(ji) > 0 and p_bb1:\n p_j1l = lepton.TLV + ji[0].TLV\n result['M_j1l'] = p_j1l.M()\n result['Pt_j1l'] = p_j1l.Pt()\n result['DeltaR_j1l'] = TLV.DeltaR(lepton.TLV, ji[0].TLV)\n result['DeltaPhi_j1l'] = fold(abs(lepton.Phi - ji[0].Phi))\n result['DeltaEtaDeltaPhi_j1l'] = [[abs(lepton.Eta - ji[0].\n Eta), result['DeltaPhi_j1l']]]\n result['DeltaR_j1lbb'] = TLV.DeltaR(p_j1l, p_bb1)\n result['DeltaPhi_j1lbb'] = fold(abs(p_j1l.Phi() - p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_j1lbb'] = [[abs(p_j1l.Eta() -\n p_bb1.Eta()), result['DeltaPhi_j1lbb']]]\n if len(ji) > 1:\n result['DeltaR_j2l'] = TLV.DeltaR(lepton.TLV, ji[1].TLV)\n result['DeltaPhi_j2l'] = fold(abs(lepton.Phi - ji[1].Phi))\n result['DeltaEtaDeltaPhi_j2l'] = [[abs(lepton.Eta - ji[\n 1].Eta), result['DeltaPhi_j2l']]]\n if len(ji) > 2:\n result['DeltaEtaDeltaPhi_j3l'] = [[abs(lepton.Eta -\n ji[2].Eta), fold(abs(lepton.Phi - ji[2].Phi))]]\n result['cleanup'] = []\n for var in tree_vars:\n if var in result:\n result['cleanup'].append(result[var])\n else:\n del result['cleanup']\n break\n return result\n\n\nif __name__ == '__main__':\n import sys\n from DelphesAnalysis.BaseControlPlots import runTest\n runTest(sys.argv[1], CleanUpControlPlots())\n",
"<import token>\ntree_vars = ['Njets20', 'Nbjets30', 'jet1Pt', 'jet2Pt', 'bjet1Pt',\n 'bjet2Pt', 'Pt_bb', 'Pt_bl', 'Pt_j1l', 'Pt_b1lnu', 'Pt_b2lnu', 'Pt_jjl',\n 'Pt_jjb1', 'Pt_jjb2', 'leptonPt', 'MET', 'DeltaR_j1l', 'DeltaR_j2l',\n 'DeltaR_b1l', 'DeltaR_b2l', 'DeltaR_bb1', 'DeltaR_jj', 'DeltaR_jjl',\n 'DeltaR_jjb', 'DeltaPhi_j1lbb', 'DeltaPhi_lMET', 'DeltaPhi_jjlnu',\n 'M_bb_closest', 'M_jjlnu', 'M_jjb1', 'M_jjb2', 'M_b1lnu', 'M_b2lnu',\n 'M_bl', 'M_jjl', 'M_jj', 'M_j1l', 'MT_lnu', 'MT_jjlnu']\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n \"\"\"A class to create control plots for leptons\"\"\"\n\n def __init__(self, dir=None, dataset=None, mode='plots'):\n BaseControlPlots.__init__(self, dir=dir, purpose='cleanup', dataset\n =dataset, mode=mode)\n\n def beginJob(self):\n self.addTree('cleanup', 'Variables for MVA')\n for var in tree_vars:\n self.addBranch('cleanup', var)\n self.add('Njets20', 'jets multiplicity (Pt > 20 GeV)', 15, 0, 15)\n self.add('Njets30', 'jets multiplicity (Pt > 30 GeV)', 15, 0, 15)\n self.add('Nbjets30', 'bjets multiplicity (Pt > 30 GeV)', 5, 0, 5)\n self.add('Nbjets30_cut_PUPPI', 'bjets multiplicity (Pt > 30 GeV)', \n 5, 0, 5)\n self.add('Nbjets30_cut_all', 'bjets multiplicity (Pt > 30 GeV)', 5,\n 0, 5)\n self.add('jet1Pt', 'leading jet Pt', 100, 0, 250)\n self.add('jet2Pt', 'second leading jet Pt', 100, 0, 250)\n self.add('bjet1Pt', 'leading b-jet Pt', 100, 0, 250)\n self.add('bjet2Pt', 'second leading b-jet Pt', 100, 0, 250)\n self.add('Pt_bb', 'closest bjets pair Pt', 100, 0, 500)\n self.add('Pt_bl', 'closest bjet-lepton Pt', 100, 0, 500)\n self.add('Pt_b1lnu', 'second closest bjet-lepton-neutrino Pt', 100,\n 0, 500)\n self.add('Pt_b2lnu', 'closest bjet-lepton-neutrino Pt', 100, 0, 500)\n self.add('Pt_j1l', 'closest jet-lepton Pt', 100, 0, 500)\n self.add('Pt_jjl', 'leading jets-lepton Pt', 100, 0, 500)\n self.add('Pt_jjb1', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Pt_jjb2', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Eta_bb', 'closest bjet pair Eta', 100, 0, 500)\n self.add('leptonPt', 'lepton Pt', 100, 0, 250)\n self.add('MET', 'MET', 100, 0, 300)\n self.add('M_jj', 'leading jet-jet Mass', 100, 0, 300)\n self.add('M_jjb1', 'hadronic top reco Mass', 100, 0, 700)\n self.add('M_jjb2', 'hadronic top reco Mass', 100, 0, 700)\n self.add2D('M_jjb_2D', 'M_jjb1 vs. M_jjb2', 100, 0, 700, 100, 0, 700)\n self.add2D('M_jj_NPU', 'NPU vs. M_jj', 80, 0, 300, 80, 80, 200)\n self.add('M_jjl', 'leading jets-lepton Mass', 100, 0, 450)\n self.add('M_jjlnu', 'leading jets-lepton-MET Mass', 100, 0, 800)\n self.add('M_j1l', 'closest jet-lepton Mass', 100, 0, 450)\n self.add('M_bb_leading', 'leading bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_closest', 'closest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_farthest', 'farthest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bl', 'closest bjet-lepton Mass', 100, 0, 300)\n self.add('MT_lnu', 'Wlnu Mt', 100, 0, 200)\n self.add('MT_jjlnu', 'HWW Mt', 100, 0, 300)\n self.add('M_b1lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add('M_b2lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add2D('M_blnu_2D', 'M_b1lnu vs. M_b2lnu', 100, 0, 500, 100, 0, 500\n )\n self.add('DeltaR_jj', 'leading jet-jet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1l', 'closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_j2l', '2nd closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_jjl', 'leading jets-lepton DeltaR', 100, 0, 4.5)\n self.add('DeltaR_jjb', 'leading jets-bjet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1lbb', 'closest jet-lepton-bjets DeltaR', 100, 0, 4.5\n )\n self.add('DeltaR_jjlbb', 'leading jets-lepton-bjets DeltaR', 100, 0,\n 4.5)\n self.add('DeltaR_jjbbl', 'leading jets-bjet-bjet-lepton DeltaR', \n 100, 0, 4.5)\n self.add('DeltaR_bb1', 'closest bjet-bjet pair DeltaR', 100, 0, 4)\n self.add('DeltaR_b1l', 'farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_b2l', '2nd farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaPhi_jj', 'leading jet-jet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1l', 'closest jet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j2l', '2nd closest jet-lepton DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_jjl', 'leading jets-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjb', 'leading jets-bjet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1lbb', 'closest jet-lepton-bjets DeltaPhi', 100,\n 0, 3.5)\n self.add('DeltaPhi_jjlbb', 'leading jets-lepton-bjets DeltaPhi', \n 100, 0, 3.5)\n self.add('DeltaPhi_jjbbl', 'leading jets-bjet-bjet-lepton DeltaPhi',\n 100, 0, 3.5)\n self.add('DeltaPhi_bb1', 'closest bjet-bjet pair DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_b1l', 'farthest bjet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_b2l', '2nd farthest bjet-lepton DeltaPhi', 100, \n 0, 3.5)\n self.add('DeltaPhi_lMET', 'lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjlnu', 'jets-lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add2D('DeltaEtaDeltaPhi_jj',\n 'leading jet-jet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1l',\n 'closest jet-lepton combination DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j2l',\n '2nd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j3l',\n '3rd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjl',\n 'leading jets-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2\n )\n self.add2D('DeltaEtaDeltaPhi_jjb',\n 'leading jets-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1lbb',\n 'closest jet-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjlbb',\n 'leading jets-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjbbl',\n 'leading jets-bjet-bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_bb1',\n 'closest bjet-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_b1l',\n 'farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0,\n 3.2)\n self.add2D('DeltaEtaDeltaPhi_b2l',\n '2nd farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n\n def process(self, event):\n result = {}\n jets = event.cleanedJets20[:]\n alljets = [j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5]\n bjets = event.bjets30[:]\n result['Njets20'] = len(event.cleanedJets20)\n result['Njets30'] = len(event.cleanedJets30)\n result['Nbjets30'] = len(event.bjets30)\n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0\n ].MET > 20:\n result['Nbjets30_cut_PUPPI'] = len(event.bjets30)\n result['Nbjets30_cut_all'] = len([j for j in alljets if j.BTag and\n j.PT > 30])\n NPU = event.npu[0]\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV, MET)\n bl = []\n p_bl = None\n if lepton and bjets:\n bl = sorted(bjets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV),\n reverse=True)\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV + bl[-1].TLV\n result['M_bl'] = p_bl.M()\n result['Pt_bl'] = p_bl.Pt()\n result['DeltaR_b1l'] = TLV.DeltaR(lepton.TLV, bl[0].TLV)\n result['DeltaPhi_b1l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b1l'] = [[DeltaEta, DeltaPhi]]\n if len(bl) > 1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result['M_bb_farthest'] = (bl[0].TLV + bl[1].TLV).M()\n result['DeltaR_b2l'] = TLV.DeltaR(lepton.TLV, bl[1].TLV)\n result['DeltaPhi_b2l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b2l'] = [[DeltaEta, DeltaPhi]]\n DeltaR_bb_closest = 1000\n bjet_closest = []\n p_bb1 = None\n for j1, j2 in combinations(bjets, 2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1, j2]\n p_bb1 = p_bb\n result['M_bb_closest'] = p_bb.M()\n result['Pt_bb'] = p_bb.Pt()\n result['DeltaR_bb1'] = TLV.DeltaR(j1.TLV, j2.TLV)\n result['DeltaPhi_bb1'] = fold(abs(j1.Phi - j2.Phi))\n result['DeltaEtaDeltaPhi_bb1'] = [[abs(j1.Eta - j2.Eta),\n result['DeltaPhi_bb1']]]\n DeltaR_bb_closest = DeltaR\n if len(bjets) > 1:\n result['M_bb_leading'] = (bjets[0].TLV + bjets[1].TLV).M()\n for bjet in bjet_closest:\n jets.remove(bjet)\n if len(jets) > 0:\n result['jet1Pt'] = jets[0].PT\n if len(jets) > 1:\n result['jet2Pt'] = jets[1].PT\n if len(bjets) > 1:\n result['bjet1Pt'] = bjet_closest[0].PT\n result['bjet2Pt'] = bjet_closest[1].PT\n elif len(bjets):\n result['bjet1Pt'] = bjets[0].PT\n if len(jets) > 1:\n p_jj = jets[0].TLV + jets[1].TLV\n result['M_jj'] = p_jj.M()\n result['DeltaR_jj'] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result['DeltaPhi_jj'] = fold(abs(jets[0].Phi - jets[1].Phi))\n result['DeltaEtaDeltaPhi_jj'] = [[abs(jets[0].Eta - jets[1].Eta\n ), result['DeltaPhi_jj']]]\n result['M_jj_NPU'] = [[p_jj.M(), NPU.HT]]\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result['M_jjl'] = p_jjl.M()\n result['Pt_jjl'] = p_jjl.Pt()\n result['M_jjlnu'] = (p_jj + lepton.TLV + p_neutrino).M()\n result['DeltaR_jjl'] = TLV.DeltaR(p_jj, lepton.TLV)\n result['DeltaPhi_jjl'] = fold(abs(p_jj.Phi() - lepton.Phi))\n result['DeltaEtaDeltaPhi_jjl'] = [[abs(p_jj.Eta() - lepton.\n Eta), result['DeltaPhi_jjl']]]\n result['DeltaPhi_jjlnu'] = fold(abs(p_jjl.Phi() - MET.Phi))\n result['MT_jjlnu'] = sqrt(2 * MET.MET * p_jjl.Pt() * (1 -\n cos(p_jjl.Phi() - MET.Phi)))\n if len(bl) > 1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result['M_b1lnu'] = p_blnu.M()\n result['M_b2lnu'] = p_b2lnu.M()\n result['M_blnu_2D'] = [[result['M_b1lnu'], result[\n 'M_b2lnu']]]\n result['Pt_b1lnu'] = p_blnu.Pt()\n result['Pt_b2lnu'] = p_b2lnu.Pt()\n if len(event.cleanedJets20) > 3:\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result['M_jjl'] = p_jjl.M()\n result['M_jjb1'] = p_jjb.M()\n result['M_jjb2'] = p_jjb2.M()\n result['M_jjb_2D'] = [[result['M_jjb1'], result[\n 'M_jjb2']]]\n result['Pt_jjb1'] = p_jjb.Pt()\n result['Pt_jjb2'] = p_jjb2.Pt()\n result['DeltaR_jjb'] = TLV.DeltaR(p_jj, bl[-2].TLV)\n result['DeltaPhi_jjb'] = fold(abs(p_jj.Phi() - bl[-\n 2].Phi))\n result['DeltaEtaDeltaPhi_jjb'] = [[abs(p_jj.Eta() -\n bl[-2].Eta), result['DeltaPhi_jjb']]]\n result['DeltaR_jjlbb'] = TLV.DeltaR(p_jjl, p_bb1)\n result['DeltaPhi_jjlbb'] = fold(abs(p_jjl.Phi() -\n p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_jjlbb'] = [[abs(p_jjl.Eta(\n ) - p_bb1.Eta()), result['DeltaPhi_jjlbb']]]\n result['DeltaR_jjbbl'] = TLV.DeltaR(p_jjb, p_bl)\n result['DeltaPhi_jjbbl'] = fold(abs(p_jjb.Phi() -\n p_bl.Phi()))\n result['DeltaEtaDeltaPhi_jjbbl'] = [[abs(p_jjb.Eta(\n ) - p_bl.Eta()), result['DeltaPhi_jjbbl']]]\n if lepton:\n result['leptonPt'] = lepton.PT\n result['MET'] = MET.MET\n result['DeltaPhi_lMET'] = abs(MET.Phi - lepton.Phi)\n result['MT_lnu'] = recoWlnu2Mt(lepton, MET)\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV))[:3]\n if len(ji) > 0 and p_bb1:\n p_j1l = lepton.TLV + ji[0].TLV\n result['M_j1l'] = p_j1l.M()\n result['Pt_j1l'] = p_j1l.Pt()\n result['DeltaR_j1l'] = TLV.DeltaR(lepton.TLV, ji[0].TLV)\n result['DeltaPhi_j1l'] = fold(abs(lepton.Phi - ji[0].Phi))\n result['DeltaEtaDeltaPhi_j1l'] = [[abs(lepton.Eta - ji[0].\n Eta), result['DeltaPhi_j1l']]]\n result['DeltaR_j1lbb'] = TLV.DeltaR(p_j1l, p_bb1)\n result['DeltaPhi_j1lbb'] = fold(abs(p_j1l.Phi() - p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_j1lbb'] = [[abs(p_j1l.Eta() -\n p_bb1.Eta()), result['DeltaPhi_j1lbb']]]\n if len(ji) > 1:\n result['DeltaR_j2l'] = TLV.DeltaR(lepton.TLV, ji[1].TLV)\n result['DeltaPhi_j2l'] = fold(abs(lepton.Phi - ji[1].Phi))\n result['DeltaEtaDeltaPhi_j2l'] = [[abs(lepton.Eta - ji[\n 1].Eta), result['DeltaPhi_j2l']]]\n if len(ji) > 2:\n result['DeltaEtaDeltaPhi_j3l'] = [[abs(lepton.Eta -\n ji[2].Eta), fold(abs(lepton.Phi - ji[2].Phi))]]\n result['cleanup'] = []\n for var in tree_vars:\n if var in result:\n result['cleanup'].append(result[var])\n else:\n del result['cleanup']\n break\n return result\n\n\nif __name__ == '__main__':\n import sys\n from DelphesAnalysis.BaseControlPlots import runTest\n runTest(sys.argv[1], CleanUpControlPlots())\n",
"<import token>\n<assignment token>\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n \"\"\"A class to create control plots for leptons\"\"\"\n\n def __init__(self, dir=None, dataset=None, mode='plots'):\n BaseControlPlots.__init__(self, dir=dir, purpose='cleanup', dataset\n =dataset, mode=mode)\n\n def beginJob(self):\n self.addTree('cleanup', 'Variables for MVA')\n for var in tree_vars:\n self.addBranch('cleanup', var)\n self.add('Njets20', 'jets multiplicity (Pt > 20 GeV)', 15, 0, 15)\n self.add('Njets30', 'jets multiplicity (Pt > 30 GeV)', 15, 0, 15)\n self.add('Nbjets30', 'bjets multiplicity (Pt > 30 GeV)', 5, 0, 5)\n self.add('Nbjets30_cut_PUPPI', 'bjets multiplicity (Pt > 30 GeV)', \n 5, 0, 5)\n self.add('Nbjets30_cut_all', 'bjets multiplicity (Pt > 30 GeV)', 5,\n 0, 5)\n self.add('jet1Pt', 'leading jet Pt', 100, 0, 250)\n self.add('jet2Pt', 'second leading jet Pt', 100, 0, 250)\n self.add('bjet1Pt', 'leading b-jet Pt', 100, 0, 250)\n self.add('bjet2Pt', 'second leading b-jet Pt', 100, 0, 250)\n self.add('Pt_bb', 'closest bjets pair Pt', 100, 0, 500)\n self.add('Pt_bl', 'closest bjet-lepton Pt', 100, 0, 500)\n self.add('Pt_b1lnu', 'second closest bjet-lepton-neutrino Pt', 100,\n 0, 500)\n self.add('Pt_b2lnu', 'closest bjet-lepton-neutrino Pt', 100, 0, 500)\n self.add('Pt_j1l', 'closest jet-lepton Pt', 100, 0, 500)\n self.add('Pt_jjl', 'leading jets-lepton Pt', 100, 0, 500)\n self.add('Pt_jjb1', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Pt_jjb2', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Eta_bb', 'closest bjet pair Eta', 100, 0, 500)\n self.add('leptonPt', 'lepton Pt', 100, 0, 250)\n self.add('MET', 'MET', 100, 0, 300)\n self.add('M_jj', 'leading jet-jet Mass', 100, 0, 300)\n self.add('M_jjb1', 'hadronic top reco Mass', 100, 0, 700)\n self.add('M_jjb2', 'hadronic top reco Mass', 100, 0, 700)\n self.add2D('M_jjb_2D', 'M_jjb1 vs. M_jjb2', 100, 0, 700, 100, 0, 700)\n self.add2D('M_jj_NPU', 'NPU vs. M_jj', 80, 0, 300, 80, 80, 200)\n self.add('M_jjl', 'leading jets-lepton Mass', 100, 0, 450)\n self.add('M_jjlnu', 'leading jets-lepton-MET Mass', 100, 0, 800)\n self.add('M_j1l', 'closest jet-lepton Mass', 100, 0, 450)\n self.add('M_bb_leading', 'leading bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_closest', 'closest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_farthest', 'farthest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bl', 'closest bjet-lepton Mass', 100, 0, 300)\n self.add('MT_lnu', 'Wlnu Mt', 100, 0, 200)\n self.add('MT_jjlnu', 'HWW Mt', 100, 0, 300)\n self.add('M_b1lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add('M_b2lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add2D('M_blnu_2D', 'M_b1lnu vs. M_b2lnu', 100, 0, 500, 100, 0, 500\n )\n self.add('DeltaR_jj', 'leading jet-jet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1l', 'closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_j2l', '2nd closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_jjl', 'leading jets-lepton DeltaR', 100, 0, 4.5)\n self.add('DeltaR_jjb', 'leading jets-bjet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1lbb', 'closest jet-lepton-bjets DeltaR', 100, 0, 4.5\n )\n self.add('DeltaR_jjlbb', 'leading jets-lepton-bjets DeltaR', 100, 0,\n 4.5)\n self.add('DeltaR_jjbbl', 'leading jets-bjet-bjet-lepton DeltaR', \n 100, 0, 4.5)\n self.add('DeltaR_bb1', 'closest bjet-bjet pair DeltaR', 100, 0, 4)\n self.add('DeltaR_b1l', 'farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_b2l', '2nd farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaPhi_jj', 'leading jet-jet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1l', 'closest jet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j2l', '2nd closest jet-lepton DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_jjl', 'leading jets-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjb', 'leading jets-bjet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1lbb', 'closest jet-lepton-bjets DeltaPhi', 100,\n 0, 3.5)\n self.add('DeltaPhi_jjlbb', 'leading jets-lepton-bjets DeltaPhi', \n 100, 0, 3.5)\n self.add('DeltaPhi_jjbbl', 'leading jets-bjet-bjet-lepton DeltaPhi',\n 100, 0, 3.5)\n self.add('DeltaPhi_bb1', 'closest bjet-bjet pair DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_b1l', 'farthest bjet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_b2l', '2nd farthest bjet-lepton DeltaPhi', 100, \n 0, 3.5)\n self.add('DeltaPhi_lMET', 'lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjlnu', 'jets-lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add2D('DeltaEtaDeltaPhi_jj',\n 'leading jet-jet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1l',\n 'closest jet-lepton combination DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j2l',\n '2nd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j3l',\n '3rd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjl',\n 'leading jets-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2\n )\n self.add2D('DeltaEtaDeltaPhi_jjb',\n 'leading jets-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1lbb',\n 'closest jet-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjlbb',\n 'leading jets-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjbbl',\n 'leading jets-bjet-bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_bb1',\n 'closest bjet-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_b1l',\n 'farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0,\n 3.2)\n self.add2D('DeltaEtaDeltaPhi_b2l',\n '2nd farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n\n def process(self, event):\n result = {}\n jets = event.cleanedJets20[:]\n alljets = [j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5]\n bjets = event.bjets30[:]\n result['Njets20'] = len(event.cleanedJets20)\n result['Njets30'] = len(event.cleanedJets30)\n result['Nbjets30'] = len(event.bjets30)\n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0\n ].MET > 20:\n result['Nbjets30_cut_PUPPI'] = len(event.bjets30)\n result['Nbjets30_cut_all'] = len([j for j in alljets if j.BTag and\n j.PT > 30])\n NPU = event.npu[0]\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV, MET)\n bl = []\n p_bl = None\n if lepton and bjets:\n bl = sorted(bjets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV),\n reverse=True)\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV + bl[-1].TLV\n result['M_bl'] = p_bl.M()\n result['Pt_bl'] = p_bl.Pt()\n result['DeltaR_b1l'] = TLV.DeltaR(lepton.TLV, bl[0].TLV)\n result['DeltaPhi_b1l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b1l'] = [[DeltaEta, DeltaPhi]]\n if len(bl) > 1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result['M_bb_farthest'] = (bl[0].TLV + bl[1].TLV).M()\n result['DeltaR_b2l'] = TLV.DeltaR(lepton.TLV, bl[1].TLV)\n result['DeltaPhi_b2l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b2l'] = [[DeltaEta, DeltaPhi]]\n DeltaR_bb_closest = 1000\n bjet_closest = []\n p_bb1 = None\n for j1, j2 in combinations(bjets, 2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1, j2]\n p_bb1 = p_bb\n result['M_bb_closest'] = p_bb.M()\n result['Pt_bb'] = p_bb.Pt()\n result['DeltaR_bb1'] = TLV.DeltaR(j1.TLV, j2.TLV)\n result['DeltaPhi_bb1'] = fold(abs(j1.Phi - j2.Phi))\n result['DeltaEtaDeltaPhi_bb1'] = [[abs(j1.Eta - j2.Eta),\n result['DeltaPhi_bb1']]]\n DeltaR_bb_closest = DeltaR\n if len(bjets) > 1:\n result['M_bb_leading'] = (bjets[0].TLV + bjets[1].TLV).M()\n for bjet in bjet_closest:\n jets.remove(bjet)\n if len(jets) > 0:\n result['jet1Pt'] = jets[0].PT\n if len(jets) > 1:\n result['jet2Pt'] = jets[1].PT\n if len(bjets) > 1:\n result['bjet1Pt'] = bjet_closest[0].PT\n result['bjet2Pt'] = bjet_closest[1].PT\n elif len(bjets):\n result['bjet1Pt'] = bjets[0].PT\n if len(jets) > 1:\n p_jj = jets[0].TLV + jets[1].TLV\n result['M_jj'] = p_jj.M()\n result['DeltaR_jj'] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result['DeltaPhi_jj'] = fold(abs(jets[0].Phi - jets[1].Phi))\n result['DeltaEtaDeltaPhi_jj'] = [[abs(jets[0].Eta - jets[1].Eta\n ), result['DeltaPhi_jj']]]\n result['M_jj_NPU'] = [[p_jj.M(), NPU.HT]]\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result['M_jjl'] = p_jjl.M()\n result['Pt_jjl'] = p_jjl.Pt()\n result['M_jjlnu'] = (p_jj + lepton.TLV + p_neutrino).M()\n result['DeltaR_jjl'] = TLV.DeltaR(p_jj, lepton.TLV)\n result['DeltaPhi_jjl'] = fold(abs(p_jj.Phi() - lepton.Phi))\n result['DeltaEtaDeltaPhi_jjl'] = [[abs(p_jj.Eta() - lepton.\n Eta), result['DeltaPhi_jjl']]]\n result['DeltaPhi_jjlnu'] = fold(abs(p_jjl.Phi() - MET.Phi))\n result['MT_jjlnu'] = sqrt(2 * MET.MET * p_jjl.Pt() * (1 -\n cos(p_jjl.Phi() - MET.Phi)))\n if len(bl) > 1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result['M_b1lnu'] = p_blnu.M()\n result['M_b2lnu'] = p_b2lnu.M()\n result['M_blnu_2D'] = [[result['M_b1lnu'], result[\n 'M_b2lnu']]]\n result['Pt_b1lnu'] = p_blnu.Pt()\n result['Pt_b2lnu'] = p_b2lnu.Pt()\n if len(event.cleanedJets20) > 3:\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result['M_jjl'] = p_jjl.M()\n result['M_jjb1'] = p_jjb.M()\n result['M_jjb2'] = p_jjb2.M()\n result['M_jjb_2D'] = [[result['M_jjb1'], result[\n 'M_jjb2']]]\n result['Pt_jjb1'] = p_jjb.Pt()\n result['Pt_jjb2'] = p_jjb2.Pt()\n result['DeltaR_jjb'] = TLV.DeltaR(p_jj, bl[-2].TLV)\n result['DeltaPhi_jjb'] = fold(abs(p_jj.Phi() - bl[-\n 2].Phi))\n result['DeltaEtaDeltaPhi_jjb'] = [[abs(p_jj.Eta() -\n bl[-2].Eta), result['DeltaPhi_jjb']]]\n result['DeltaR_jjlbb'] = TLV.DeltaR(p_jjl, p_bb1)\n result['DeltaPhi_jjlbb'] = fold(abs(p_jjl.Phi() -\n p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_jjlbb'] = [[abs(p_jjl.Eta(\n ) - p_bb1.Eta()), result['DeltaPhi_jjlbb']]]\n result['DeltaR_jjbbl'] = TLV.DeltaR(p_jjb, p_bl)\n result['DeltaPhi_jjbbl'] = fold(abs(p_jjb.Phi() -\n p_bl.Phi()))\n result['DeltaEtaDeltaPhi_jjbbl'] = [[abs(p_jjb.Eta(\n ) - p_bl.Eta()), result['DeltaPhi_jjbbl']]]\n if lepton:\n result['leptonPt'] = lepton.PT\n result['MET'] = MET.MET\n result['DeltaPhi_lMET'] = abs(MET.Phi - lepton.Phi)\n result['MT_lnu'] = recoWlnu2Mt(lepton, MET)\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV))[:3]\n if len(ji) > 0 and p_bb1:\n p_j1l = lepton.TLV + ji[0].TLV\n result['M_j1l'] = p_j1l.M()\n result['Pt_j1l'] = p_j1l.Pt()\n result['DeltaR_j1l'] = TLV.DeltaR(lepton.TLV, ji[0].TLV)\n result['DeltaPhi_j1l'] = fold(abs(lepton.Phi - ji[0].Phi))\n result['DeltaEtaDeltaPhi_j1l'] = [[abs(lepton.Eta - ji[0].\n Eta), result['DeltaPhi_j1l']]]\n result['DeltaR_j1lbb'] = TLV.DeltaR(p_j1l, p_bb1)\n result['DeltaPhi_j1lbb'] = fold(abs(p_j1l.Phi() - p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_j1lbb'] = [[abs(p_j1l.Eta() -\n p_bb1.Eta()), result['DeltaPhi_j1lbb']]]\n if len(ji) > 1:\n result['DeltaR_j2l'] = TLV.DeltaR(lepton.TLV, ji[1].TLV)\n result['DeltaPhi_j2l'] = fold(abs(lepton.Phi - ji[1].Phi))\n result['DeltaEtaDeltaPhi_j2l'] = [[abs(lepton.Eta - ji[\n 1].Eta), result['DeltaPhi_j2l']]]\n if len(ji) > 2:\n result['DeltaEtaDeltaPhi_j3l'] = [[abs(lepton.Eta -\n ji[2].Eta), fold(abs(lepton.Phi - ji[2].Phi))]]\n result['cleanup'] = []\n for var in tree_vars:\n if var in result:\n result['cleanup'].append(result[var])\n else:\n del result['cleanup']\n break\n return result\n\n\nif __name__ == '__main__':\n import sys\n from DelphesAnalysis.BaseControlPlots import runTest\n runTest(sys.argv[1], CleanUpControlPlots())\n",
"<import token>\n<assignment token>\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n \"\"\"A class to create control plots for leptons\"\"\"\n\n def __init__(self, dir=None, dataset=None, mode='plots'):\n BaseControlPlots.__init__(self, dir=dir, purpose='cleanup', dataset\n =dataset, mode=mode)\n\n def beginJob(self):\n self.addTree('cleanup', 'Variables for MVA')\n for var in tree_vars:\n self.addBranch('cleanup', var)\n self.add('Njets20', 'jets multiplicity (Pt > 20 GeV)', 15, 0, 15)\n self.add('Njets30', 'jets multiplicity (Pt > 30 GeV)', 15, 0, 15)\n self.add('Nbjets30', 'bjets multiplicity (Pt > 30 GeV)', 5, 0, 5)\n self.add('Nbjets30_cut_PUPPI', 'bjets multiplicity (Pt > 30 GeV)', \n 5, 0, 5)\n self.add('Nbjets30_cut_all', 'bjets multiplicity (Pt > 30 GeV)', 5,\n 0, 5)\n self.add('jet1Pt', 'leading jet Pt', 100, 0, 250)\n self.add('jet2Pt', 'second leading jet Pt', 100, 0, 250)\n self.add('bjet1Pt', 'leading b-jet Pt', 100, 0, 250)\n self.add('bjet2Pt', 'second leading b-jet Pt', 100, 0, 250)\n self.add('Pt_bb', 'closest bjets pair Pt', 100, 0, 500)\n self.add('Pt_bl', 'closest bjet-lepton Pt', 100, 0, 500)\n self.add('Pt_b1lnu', 'second closest bjet-lepton-neutrino Pt', 100,\n 0, 500)\n self.add('Pt_b2lnu', 'closest bjet-lepton-neutrino Pt', 100, 0, 500)\n self.add('Pt_j1l', 'closest jet-lepton Pt', 100, 0, 500)\n self.add('Pt_jjl', 'leading jets-lepton Pt', 100, 0, 500)\n self.add('Pt_jjb1', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Pt_jjb2', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Eta_bb', 'closest bjet pair Eta', 100, 0, 500)\n self.add('leptonPt', 'lepton Pt', 100, 0, 250)\n self.add('MET', 'MET', 100, 0, 300)\n self.add('M_jj', 'leading jet-jet Mass', 100, 0, 300)\n self.add('M_jjb1', 'hadronic top reco Mass', 100, 0, 700)\n self.add('M_jjb2', 'hadronic top reco Mass', 100, 0, 700)\n self.add2D('M_jjb_2D', 'M_jjb1 vs. M_jjb2', 100, 0, 700, 100, 0, 700)\n self.add2D('M_jj_NPU', 'NPU vs. M_jj', 80, 0, 300, 80, 80, 200)\n self.add('M_jjl', 'leading jets-lepton Mass', 100, 0, 450)\n self.add('M_jjlnu', 'leading jets-lepton-MET Mass', 100, 0, 800)\n self.add('M_j1l', 'closest jet-lepton Mass', 100, 0, 450)\n self.add('M_bb_leading', 'leading bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_closest', 'closest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_farthest', 'farthest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bl', 'closest bjet-lepton Mass', 100, 0, 300)\n self.add('MT_lnu', 'Wlnu Mt', 100, 0, 200)\n self.add('MT_jjlnu', 'HWW Mt', 100, 0, 300)\n self.add('M_b1lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add('M_b2lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add2D('M_blnu_2D', 'M_b1lnu vs. M_b2lnu', 100, 0, 500, 100, 0, 500\n )\n self.add('DeltaR_jj', 'leading jet-jet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1l', 'closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_j2l', '2nd closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_jjl', 'leading jets-lepton DeltaR', 100, 0, 4.5)\n self.add('DeltaR_jjb', 'leading jets-bjet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1lbb', 'closest jet-lepton-bjets DeltaR', 100, 0, 4.5\n )\n self.add('DeltaR_jjlbb', 'leading jets-lepton-bjets DeltaR', 100, 0,\n 4.5)\n self.add('DeltaR_jjbbl', 'leading jets-bjet-bjet-lepton DeltaR', \n 100, 0, 4.5)\n self.add('DeltaR_bb1', 'closest bjet-bjet pair DeltaR', 100, 0, 4)\n self.add('DeltaR_b1l', 'farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_b2l', '2nd farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaPhi_jj', 'leading jet-jet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1l', 'closest jet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j2l', '2nd closest jet-lepton DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_jjl', 'leading jets-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjb', 'leading jets-bjet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1lbb', 'closest jet-lepton-bjets DeltaPhi', 100,\n 0, 3.5)\n self.add('DeltaPhi_jjlbb', 'leading jets-lepton-bjets DeltaPhi', \n 100, 0, 3.5)\n self.add('DeltaPhi_jjbbl', 'leading jets-bjet-bjet-lepton DeltaPhi',\n 100, 0, 3.5)\n self.add('DeltaPhi_bb1', 'closest bjet-bjet pair DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_b1l', 'farthest bjet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_b2l', '2nd farthest bjet-lepton DeltaPhi', 100, \n 0, 3.5)\n self.add('DeltaPhi_lMET', 'lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjlnu', 'jets-lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add2D('DeltaEtaDeltaPhi_jj',\n 'leading jet-jet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1l',\n 'closest jet-lepton combination DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j2l',\n '2nd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j3l',\n '3rd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjl',\n 'leading jets-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2\n )\n self.add2D('DeltaEtaDeltaPhi_jjb',\n 'leading jets-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1lbb',\n 'closest jet-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjlbb',\n 'leading jets-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjbbl',\n 'leading jets-bjet-bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_bb1',\n 'closest bjet-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_b1l',\n 'farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0,\n 3.2)\n self.add2D('DeltaEtaDeltaPhi_b2l',\n '2nd farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n\n def process(self, event):\n result = {}\n jets = event.cleanedJets20[:]\n alljets = [j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5]\n bjets = event.bjets30[:]\n result['Njets20'] = len(event.cleanedJets20)\n result['Njets30'] = len(event.cleanedJets30)\n result['Nbjets30'] = len(event.bjets30)\n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0\n ].MET > 20:\n result['Nbjets30_cut_PUPPI'] = len(event.bjets30)\n result['Nbjets30_cut_all'] = len([j for j in alljets if j.BTag and\n j.PT > 30])\n NPU = event.npu[0]\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV, MET)\n bl = []\n p_bl = None\n if lepton and bjets:\n bl = sorted(bjets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV),\n reverse=True)\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV + bl[-1].TLV\n result['M_bl'] = p_bl.M()\n result['Pt_bl'] = p_bl.Pt()\n result['DeltaR_b1l'] = TLV.DeltaR(lepton.TLV, bl[0].TLV)\n result['DeltaPhi_b1l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b1l'] = [[DeltaEta, DeltaPhi]]\n if len(bl) > 1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result['M_bb_farthest'] = (bl[0].TLV + bl[1].TLV).M()\n result['DeltaR_b2l'] = TLV.DeltaR(lepton.TLV, bl[1].TLV)\n result['DeltaPhi_b2l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b2l'] = [[DeltaEta, DeltaPhi]]\n DeltaR_bb_closest = 1000\n bjet_closest = []\n p_bb1 = None\n for j1, j2 in combinations(bjets, 2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1, j2]\n p_bb1 = p_bb\n result['M_bb_closest'] = p_bb.M()\n result['Pt_bb'] = p_bb.Pt()\n result['DeltaR_bb1'] = TLV.DeltaR(j1.TLV, j2.TLV)\n result['DeltaPhi_bb1'] = fold(abs(j1.Phi - j2.Phi))\n result['DeltaEtaDeltaPhi_bb1'] = [[abs(j1.Eta - j2.Eta),\n result['DeltaPhi_bb1']]]\n DeltaR_bb_closest = DeltaR\n if len(bjets) > 1:\n result['M_bb_leading'] = (bjets[0].TLV + bjets[1].TLV).M()\n for bjet in bjet_closest:\n jets.remove(bjet)\n if len(jets) > 0:\n result['jet1Pt'] = jets[0].PT\n if len(jets) > 1:\n result['jet2Pt'] = jets[1].PT\n if len(bjets) > 1:\n result['bjet1Pt'] = bjet_closest[0].PT\n result['bjet2Pt'] = bjet_closest[1].PT\n elif len(bjets):\n result['bjet1Pt'] = bjets[0].PT\n if len(jets) > 1:\n p_jj = jets[0].TLV + jets[1].TLV\n result['M_jj'] = p_jj.M()\n result['DeltaR_jj'] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result['DeltaPhi_jj'] = fold(abs(jets[0].Phi - jets[1].Phi))\n result['DeltaEtaDeltaPhi_jj'] = [[abs(jets[0].Eta - jets[1].Eta\n ), result['DeltaPhi_jj']]]\n result['M_jj_NPU'] = [[p_jj.M(), NPU.HT]]\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result['M_jjl'] = p_jjl.M()\n result['Pt_jjl'] = p_jjl.Pt()\n result['M_jjlnu'] = (p_jj + lepton.TLV + p_neutrino).M()\n result['DeltaR_jjl'] = TLV.DeltaR(p_jj, lepton.TLV)\n result['DeltaPhi_jjl'] = fold(abs(p_jj.Phi() - lepton.Phi))\n result['DeltaEtaDeltaPhi_jjl'] = [[abs(p_jj.Eta() - lepton.\n Eta), result['DeltaPhi_jjl']]]\n result['DeltaPhi_jjlnu'] = fold(abs(p_jjl.Phi() - MET.Phi))\n result['MT_jjlnu'] = sqrt(2 * MET.MET * p_jjl.Pt() * (1 -\n cos(p_jjl.Phi() - MET.Phi)))\n if len(bl) > 1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result['M_b1lnu'] = p_blnu.M()\n result['M_b2lnu'] = p_b2lnu.M()\n result['M_blnu_2D'] = [[result['M_b1lnu'], result[\n 'M_b2lnu']]]\n result['Pt_b1lnu'] = p_blnu.Pt()\n result['Pt_b2lnu'] = p_b2lnu.Pt()\n if len(event.cleanedJets20) > 3:\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result['M_jjl'] = p_jjl.M()\n result['M_jjb1'] = p_jjb.M()\n result['M_jjb2'] = p_jjb2.M()\n result['M_jjb_2D'] = [[result['M_jjb1'], result[\n 'M_jjb2']]]\n result['Pt_jjb1'] = p_jjb.Pt()\n result['Pt_jjb2'] = p_jjb2.Pt()\n result['DeltaR_jjb'] = TLV.DeltaR(p_jj, bl[-2].TLV)\n result['DeltaPhi_jjb'] = fold(abs(p_jj.Phi() - bl[-\n 2].Phi))\n result['DeltaEtaDeltaPhi_jjb'] = [[abs(p_jj.Eta() -\n bl[-2].Eta), result['DeltaPhi_jjb']]]\n result['DeltaR_jjlbb'] = TLV.DeltaR(p_jjl, p_bb1)\n result['DeltaPhi_jjlbb'] = fold(abs(p_jjl.Phi() -\n p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_jjlbb'] = [[abs(p_jjl.Eta(\n ) - p_bb1.Eta()), result['DeltaPhi_jjlbb']]]\n result['DeltaR_jjbbl'] = TLV.DeltaR(p_jjb, p_bl)\n result['DeltaPhi_jjbbl'] = fold(abs(p_jjb.Phi() -\n p_bl.Phi()))\n result['DeltaEtaDeltaPhi_jjbbl'] = [[abs(p_jjb.Eta(\n ) - p_bl.Eta()), result['DeltaPhi_jjbbl']]]\n if lepton:\n result['leptonPt'] = lepton.PT\n result['MET'] = MET.MET\n result['DeltaPhi_lMET'] = abs(MET.Phi - lepton.Phi)\n result['MT_lnu'] = recoWlnu2Mt(lepton, MET)\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV))[:3]\n if len(ji) > 0 and p_bb1:\n p_j1l = lepton.TLV + ji[0].TLV\n result['M_j1l'] = p_j1l.M()\n result['Pt_j1l'] = p_j1l.Pt()\n result['DeltaR_j1l'] = TLV.DeltaR(lepton.TLV, ji[0].TLV)\n result['DeltaPhi_j1l'] = fold(abs(lepton.Phi - ji[0].Phi))\n result['DeltaEtaDeltaPhi_j1l'] = [[abs(lepton.Eta - ji[0].\n Eta), result['DeltaPhi_j1l']]]\n result['DeltaR_j1lbb'] = TLV.DeltaR(p_j1l, p_bb1)\n result['DeltaPhi_j1lbb'] = fold(abs(p_j1l.Phi() - p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_j1lbb'] = [[abs(p_j1l.Eta() -\n p_bb1.Eta()), result['DeltaPhi_j1lbb']]]\n if len(ji) > 1:\n result['DeltaR_j2l'] = TLV.DeltaR(lepton.TLV, ji[1].TLV)\n result['DeltaPhi_j2l'] = fold(abs(lepton.Phi - ji[1].Phi))\n result['DeltaEtaDeltaPhi_j2l'] = [[abs(lepton.Eta - ji[\n 1].Eta), result['DeltaPhi_j2l']]]\n if len(ji) > 2:\n result['DeltaEtaDeltaPhi_j3l'] = [[abs(lepton.Eta -\n ji[2].Eta), fold(abs(lepton.Phi - ji[2].Phi))]]\n result['cleanup'] = []\n for var in tree_vars:\n if var in result:\n result['cleanup'].append(result[var])\n else:\n del result['cleanup']\n break\n return result\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n <docstring token>\n\n def __init__(self, dir=None, dataset=None, mode='plots'):\n BaseControlPlots.__init__(self, dir=dir, purpose='cleanup', dataset\n =dataset, mode=mode)\n\n def beginJob(self):\n self.addTree('cleanup', 'Variables for MVA')\n for var in tree_vars:\n self.addBranch('cleanup', var)\n self.add('Njets20', 'jets multiplicity (Pt > 20 GeV)', 15, 0, 15)\n self.add('Njets30', 'jets multiplicity (Pt > 30 GeV)', 15, 0, 15)\n self.add('Nbjets30', 'bjets multiplicity (Pt > 30 GeV)', 5, 0, 5)\n self.add('Nbjets30_cut_PUPPI', 'bjets multiplicity (Pt > 30 GeV)', \n 5, 0, 5)\n self.add('Nbjets30_cut_all', 'bjets multiplicity (Pt > 30 GeV)', 5,\n 0, 5)\n self.add('jet1Pt', 'leading jet Pt', 100, 0, 250)\n self.add('jet2Pt', 'second leading jet Pt', 100, 0, 250)\n self.add('bjet1Pt', 'leading b-jet Pt', 100, 0, 250)\n self.add('bjet2Pt', 'second leading b-jet Pt', 100, 0, 250)\n self.add('Pt_bb', 'closest bjets pair Pt', 100, 0, 500)\n self.add('Pt_bl', 'closest bjet-lepton Pt', 100, 0, 500)\n self.add('Pt_b1lnu', 'second closest bjet-lepton-neutrino Pt', 100,\n 0, 500)\n self.add('Pt_b2lnu', 'closest bjet-lepton-neutrino Pt', 100, 0, 500)\n self.add('Pt_j1l', 'closest jet-lepton Pt', 100, 0, 500)\n self.add('Pt_jjl', 'leading jets-lepton Pt', 100, 0, 500)\n self.add('Pt_jjb1', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Pt_jjb2', 'leading jets-bjet Pt', 100, 0, 500)\n self.add('Eta_bb', 'closest bjet pair Eta', 100, 0, 500)\n self.add('leptonPt', 'lepton Pt', 100, 0, 250)\n self.add('MET', 'MET', 100, 0, 300)\n self.add('M_jj', 'leading jet-jet Mass', 100, 0, 300)\n self.add('M_jjb1', 'hadronic top reco Mass', 100, 0, 700)\n self.add('M_jjb2', 'hadronic top reco Mass', 100, 0, 700)\n self.add2D('M_jjb_2D', 'M_jjb1 vs. M_jjb2', 100, 0, 700, 100, 0, 700)\n self.add2D('M_jj_NPU', 'NPU vs. M_jj', 80, 0, 300, 80, 80, 200)\n self.add('M_jjl', 'leading jets-lepton Mass', 100, 0, 450)\n self.add('M_jjlnu', 'leading jets-lepton-MET Mass', 100, 0, 800)\n self.add('M_j1l', 'closest jet-lepton Mass', 100, 0, 450)\n self.add('M_bb_leading', 'leading bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_closest', 'closest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bb_farthest', 'farthest bjet-bjet Mass', 100, 0, 300)\n self.add('M_bl', 'closest bjet-lepton Mass', 100, 0, 300)\n self.add('MT_lnu', 'Wlnu Mt', 100, 0, 200)\n self.add('MT_jjlnu', 'HWW Mt', 100, 0, 300)\n self.add('M_b1lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add('M_b2lnu', 'leptonic top reco Mass', 100, 0, 500)\n self.add2D('M_blnu_2D', 'M_b1lnu vs. M_b2lnu', 100, 0, 500, 100, 0, 500\n )\n self.add('DeltaR_jj', 'leading jet-jet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1l', 'closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_j2l', '2nd closest jet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_jjl', 'leading jets-lepton DeltaR', 100, 0, 4.5)\n self.add('DeltaR_jjb', 'leading jets-bjet DeltaR', 100, 0, 4.5)\n self.add('DeltaR_j1lbb', 'closest jet-lepton-bjets DeltaR', 100, 0, 4.5\n )\n self.add('DeltaR_jjlbb', 'leading jets-lepton-bjets DeltaR', 100, 0,\n 4.5)\n self.add('DeltaR_jjbbl', 'leading jets-bjet-bjet-lepton DeltaR', \n 100, 0, 4.5)\n self.add('DeltaR_bb1', 'closest bjet-bjet pair DeltaR', 100, 0, 4)\n self.add('DeltaR_b1l', 'farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaR_b2l', '2nd farthest bjet-lepton DeltaR', 100, 0, 4)\n self.add('DeltaPhi_jj', 'leading jet-jet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1l', 'closest jet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j2l', '2nd closest jet-lepton DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_jjl', 'leading jets-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjb', 'leading jets-bjet DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_j1lbb', 'closest jet-lepton-bjets DeltaPhi', 100,\n 0, 3.5)\n self.add('DeltaPhi_jjlbb', 'leading jets-lepton-bjets DeltaPhi', \n 100, 0, 3.5)\n self.add('DeltaPhi_jjbbl', 'leading jets-bjet-bjet-lepton DeltaPhi',\n 100, 0, 3.5)\n self.add('DeltaPhi_bb1', 'closest bjet-bjet pair DeltaPhi', 100, 0, 3.5\n )\n self.add('DeltaPhi_b1l', 'farthest bjet-lepton DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_b2l', '2nd farthest bjet-lepton DeltaPhi', 100, \n 0, 3.5)\n self.add('DeltaPhi_lMET', 'lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add('DeltaPhi_jjlnu', 'jets-lepton-MET DeltaPhi', 100, 0, 3.5)\n self.add2D('DeltaEtaDeltaPhi_jj',\n 'leading jet-jet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1l',\n 'closest jet-lepton combination DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j2l',\n '2nd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j3l',\n '3rd closest jet-lepton combination DeltaPhi vs. DeltaEta', 50,\n 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjl',\n 'leading jets-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2\n )\n self.add2D('DeltaEtaDeltaPhi_jjb',\n 'leading jets-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_j1lbb',\n 'closest jet-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjlbb',\n 'leading jets-lepton-bjets DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_jjbbl',\n 'leading jets-bjet-bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, \n 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_bb1',\n 'closest bjet-bjet DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0, 3.2)\n self.add2D('DeltaEtaDeltaPhi_b1l',\n 'farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, 50, 0,\n 3.2)\n self.add2D('DeltaEtaDeltaPhi_b2l',\n '2nd farthest bjet-lepton DeltaPhi vs. DeltaEta', 50, 0, 3.5, \n 50, 0, 3.2)\n\n def process(self, event):\n result = {}\n jets = event.cleanedJets20[:]\n alljets = [j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5]\n bjets = event.bjets30[:]\n result['Njets20'] = len(event.cleanedJets20)\n result['Njets30'] = len(event.cleanedJets30)\n result['Nbjets30'] = len(event.bjets30)\n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0\n ].MET > 20:\n result['Nbjets30_cut_PUPPI'] = len(event.bjets30)\n result['Nbjets30_cut_all'] = len([j for j in alljets if j.BTag and\n j.PT > 30])\n NPU = event.npu[0]\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV, MET)\n bl = []\n p_bl = None\n if lepton and bjets:\n bl = sorted(bjets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV),\n reverse=True)\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV + bl[-1].TLV\n result['M_bl'] = p_bl.M()\n result['Pt_bl'] = p_bl.Pt()\n result['DeltaR_b1l'] = TLV.DeltaR(lepton.TLV, bl[0].TLV)\n result['DeltaPhi_b1l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b1l'] = [[DeltaEta, DeltaPhi]]\n if len(bl) > 1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result['M_bb_farthest'] = (bl[0].TLV + bl[1].TLV).M()\n result['DeltaR_b2l'] = TLV.DeltaR(lepton.TLV, bl[1].TLV)\n result['DeltaPhi_b2l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b2l'] = [[DeltaEta, DeltaPhi]]\n DeltaR_bb_closest = 1000\n bjet_closest = []\n p_bb1 = None\n for j1, j2 in combinations(bjets, 2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1, j2]\n p_bb1 = p_bb\n result['M_bb_closest'] = p_bb.M()\n result['Pt_bb'] = p_bb.Pt()\n result['DeltaR_bb1'] = TLV.DeltaR(j1.TLV, j2.TLV)\n result['DeltaPhi_bb1'] = fold(abs(j1.Phi - j2.Phi))\n result['DeltaEtaDeltaPhi_bb1'] = [[abs(j1.Eta - j2.Eta),\n result['DeltaPhi_bb1']]]\n DeltaR_bb_closest = DeltaR\n if len(bjets) > 1:\n result['M_bb_leading'] = (bjets[0].TLV + bjets[1].TLV).M()\n for bjet in bjet_closest:\n jets.remove(bjet)\n if len(jets) > 0:\n result['jet1Pt'] = jets[0].PT\n if len(jets) > 1:\n result['jet2Pt'] = jets[1].PT\n if len(bjets) > 1:\n result['bjet1Pt'] = bjet_closest[0].PT\n result['bjet2Pt'] = bjet_closest[1].PT\n elif len(bjets):\n result['bjet1Pt'] = bjets[0].PT\n if len(jets) > 1:\n p_jj = jets[0].TLV + jets[1].TLV\n result['M_jj'] = p_jj.M()\n result['DeltaR_jj'] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result['DeltaPhi_jj'] = fold(abs(jets[0].Phi - jets[1].Phi))\n result['DeltaEtaDeltaPhi_jj'] = [[abs(jets[0].Eta - jets[1].Eta\n ), result['DeltaPhi_jj']]]\n result['M_jj_NPU'] = [[p_jj.M(), NPU.HT]]\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result['M_jjl'] = p_jjl.M()\n result['Pt_jjl'] = p_jjl.Pt()\n result['M_jjlnu'] = (p_jj + lepton.TLV + p_neutrino).M()\n result['DeltaR_jjl'] = TLV.DeltaR(p_jj, lepton.TLV)\n result['DeltaPhi_jjl'] = fold(abs(p_jj.Phi() - lepton.Phi))\n result['DeltaEtaDeltaPhi_jjl'] = [[abs(p_jj.Eta() - lepton.\n Eta), result['DeltaPhi_jjl']]]\n result['DeltaPhi_jjlnu'] = fold(abs(p_jjl.Phi() - MET.Phi))\n result['MT_jjlnu'] = sqrt(2 * MET.MET * p_jjl.Pt() * (1 -\n cos(p_jjl.Phi() - MET.Phi)))\n if len(bl) > 1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result['M_b1lnu'] = p_blnu.M()\n result['M_b2lnu'] = p_b2lnu.M()\n result['M_blnu_2D'] = [[result['M_b1lnu'], result[\n 'M_b2lnu']]]\n result['Pt_b1lnu'] = p_blnu.Pt()\n result['Pt_b2lnu'] = p_b2lnu.Pt()\n if len(event.cleanedJets20) > 3:\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result['M_jjl'] = p_jjl.M()\n result['M_jjb1'] = p_jjb.M()\n result['M_jjb2'] = p_jjb2.M()\n result['M_jjb_2D'] = [[result['M_jjb1'], result[\n 'M_jjb2']]]\n result['Pt_jjb1'] = p_jjb.Pt()\n result['Pt_jjb2'] = p_jjb2.Pt()\n result['DeltaR_jjb'] = TLV.DeltaR(p_jj, bl[-2].TLV)\n result['DeltaPhi_jjb'] = fold(abs(p_jj.Phi() - bl[-\n 2].Phi))\n result['DeltaEtaDeltaPhi_jjb'] = [[abs(p_jj.Eta() -\n bl[-2].Eta), result['DeltaPhi_jjb']]]\n result['DeltaR_jjlbb'] = TLV.DeltaR(p_jjl, p_bb1)\n result['DeltaPhi_jjlbb'] = fold(abs(p_jjl.Phi() -\n p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_jjlbb'] = [[abs(p_jjl.Eta(\n ) - p_bb1.Eta()), result['DeltaPhi_jjlbb']]]\n result['DeltaR_jjbbl'] = TLV.DeltaR(p_jjb, p_bl)\n result['DeltaPhi_jjbbl'] = fold(abs(p_jjb.Phi() -\n p_bl.Phi()))\n result['DeltaEtaDeltaPhi_jjbbl'] = [[abs(p_jjb.Eta(\n ) - p_bl.Eta()), result['DeltaPhi_jjbbl']]]\n if lepton:\n result['leptonPt'] = lepton.PT\n result['MET'] = MET.MET\n result['DeltaPhi_lMET'] = abs(MET.Phi - lepton.Phi)\n result['MT_lnu'] = recoWlnu2Mt(lepton, MET)\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV))[:3]\n if len(ji) > 0 and p_bb1:\n p_j1l = lepton.TLV + ji[0].TLV\n result['M_j1l'] = p_j1l.M()\n result['Pt_j1l'] = p_j1l.Pt()\n result['DeltaR_j1l'] = TLV.DeltaR(lepton.TLV, ji[0].TLV)\n result['DeltaPhi_j1l'] = fold(abs(lepton.Phi - ji[0].Phi))\n result['DeltaEtaDeltaPhi_j1l'] = [[abs(lepton.Eta - ji[0].\n Eta), result['DeltaPhi_j1l']]]\n result['DeltaR_j1lbb'] = TLV.DeltaR(p_j1l, p_bb1)\n result['DeltaPhi_j1lbb'] = fold(abs(p_j1l.Phi() - p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_j1lbb'] = [[abs(p_j1l.Eta() -\n p_bb1.Eta()), result['DeltaPhi_j1lbb']]]\n if len(ji) > 1:\n result['DeltaR_j2l'] = TLV.DeltaR(lepton.TLV, ji[1].TLV)\n result['DeltaPhi_j2l'] = fold(abs(lepton.Phi - ji[1].Phi))\n result['DeltaEtaDeltaPhi_j2l'] = [[abs(lepton.Eta - ji[\n 1].Eta), result['DeltaPhi_j2l']]]\n if len(ji) > 2:\n result['DeltaEtaDeltaPhi_j3l'] = [[abs(lepton.Eta -\n ji[2].Eta), fold(abs(lepton.Phi - ji[2].Phi))]]\n result['cleanup'] = []\n for var in tree_vars:\n if var in result:\n result['cleanup'].append(result[var])\n else:\n del result['cleanup']\n break\n return result\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n <docstring token>\n\n def __init__(self, dir=None, dataset=None, mode='plots'):\n BaseControlPlots.__init__(self, dir=dir, purpose='cleanup', dataset\n =dataset, mode=mode)\n <function token>\n\n def process(self, event):\n result = {}\n jets = event.cleanedJets20[:]\n alljets = [j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5]\n bjets = event.bjets30[:]\n result['Njets20'] = len(event.cleanedJets20)\n result['Njets30'] = len(event.cleanedJets30)\n result['Nbjets30'] = len(event.bjets30)\n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0\n ].MET > 20:\n result['Nbjets30_cut_PUPPI'] = len(event.bjets30)\n result['Nbjets30_cut_all'] = len([j for j in alljets if j.BTag and\n j.PT > 30])\n NPU = event.npu[0]\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV, MET)\n bl = []\n p_bl = None\n if lepton and bjets:\n bl = sorted(bjets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV),\n reverse=True)\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV + bl[-1].TLV\n result['M_bl'] = p_bl.M()\n result['Pt_bl'] = p_bl.Pt()\n result['DeltaR_b1l'] = TLV.DeltaR(lepton.TLV, bl[0].TLV)\n result['DeltaPhi_b1l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b1l'] = [[DeltaEta, DeltaPhi]]\n if len(bl) > 1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result['M_bb_farthest'] = (bl[0].TLV + bl[1].TLV).M()\n result['DeltaR_b2l'] = TLV.DeltaR(lepton.TLV, bl[1].TLV)\n result['DeltaPhi_b2l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b2l'] = [[DeltaEta, DeltaPhi]]\n DeltaR_bb_closest = 1000\n bjet_closest = []\n p_bb1 = None\n for j1, j2 in combinations(bjets, 2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1, j2]\n p_bb1 = p_bb\n result['M_bb_closest'] = p_bb.M()\n result['Pt_bb'] = p_bb.Pt()\n result['DeltaR_bb1'] = TLV.DeltaR(j1.TLV, j2.TLV)\n result['DeltaPhi_bb1'] = fold(abs(j1.Phi - j2.Phi))\n result['DeltaEtaDeltaPhi_bb1'] = [[abs(j1.Eta - j2.Eta),\n result['DeltaPhi_bb1']]]\n DeltaR_bb_closest = DeltaR\n if len(bjets) > 1:\n result['M_bb_leading'] = (bjets[0].TLV + bjets[1].TLV).M()\n for bjet in bjet_closest:\n jets.remove(bjet)\n if len(jets) > 0:\n result['jet1Pt'] = jets[0].PT\n if len(jets) > 1:\n result['jet2Pt'] = jets[1].PT\n if len(bjets) > 1:\n result['bjet1Pt'] = bjet_closest[0].PT\n result['bjet2Pt'] = bjet_closest[1].PT\n elif len(bjets):\n result['bjet1Pt'] = bjets[0].PT\n if len(jets) > 1:\n p_jj = jets[0].TLV + jets[1].TLV\n result['M_jj'] = p_jj.M()\n result['DeltaR_jj'] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result['DeltaPhi_jj'] = fold(abs(jets[0].Phi - jets[1].Phi))\n result['DeltaEtaDeltaPhi_jj'] = [[abs(jets[0].Eta - jets[1].Eta\n ), result['DeltaPhi_jj']]]\n result['M_jj_NPU'] = [[p_jj.M(), NPU.HT]]\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result['M_jjl'] = p_jjl.M()\n result['Pt_jjl'] = p_jjl.Pt()\n result['M_jjlnu'] = (p_jj + lepton.TLV + p_neutrino).M()\n result['DeltaR_jjl'] = TLV.DeltaR(p_jj, lepton.TLV)\n result['DeltaPhi_jjl'] = fold(abs(p_jj.Phi() - lepton.Phi))\n result['DeltaEtaDeltaPhi_jjl'] = [[abs(p_jj.Eta() - lepton.\n Eta), result['DeltaPhi_jjl']]]\n result['DeltaPhi_jjlnu'] = fold(abs(p_jjl.Phi() - MET.Phi))\n result['MT_jjlnu'] = sqrt(2 * MET.MET * p_jjl.Pt() * (1 -\n cos(p_jjl.Phi() - MET.Phi)))\n if len(bl) > 1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result['M_b1lnu'] = p_blnu.M()\n result['M_b2lnu'] = p_b2lnu.M()\n result['M_blnu_2D'] = [[result['M_b1lnu'], result[\n 'M_b2lnu']]]\n result['Pt_b1lnu'] = p_blnu.Pt()\n result['Pt_b2lnu'] = p_b2lnu.Pt()\n if len(event.cleanedJets20) > 3:\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result['M_jjl'] = p_jjl.M()\n result['M_jjb1'] = p_jjb.M()\n result['M_jjb2'] = p_jjb2.M()\n result['M_jjb_2D'] = [[result['M_jjb1'], result[\n 'M_jjb2']]]\n result['Pt_jjb1'] = p_jjb.Pt()\n result['Pt_jjb2'] = p_jjb2.Pt()\n result['DeltaR_jjb'] = TLV.DeltaR(p_jj, bl[-2].TLV)\n result['DeltaPhi_jjb'] = fold(abs(p_jj.Phi() - bl[-\n 2].Phi))\n result['DeltaEtaDeltaPhi_jjb'] = [[abs(p_jj.Eta() -\n bl[-2].Eta), result['DeltaPhi_jjb']]]\n result['DeltaR_jjlbb'] = TLV.DeltaR(p_jjl, p_bb1)\n result['DeltaPhi_jjlbb'] = fold(abs(p_jjl.Phi() -\n p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_jjlbb'] = [[abs(p_jjl.Eta(\n ) - p_bb1.Eta()), result['DeltaPhi_jjlbb']]]\n result['DeltaR_jjbbl'] = TLV.DeltaR(p_jjb, p_bl)\n result['DeltaPhi_jjbbl'] = fold(abs(p_jjb.Phi() -\n p_bl.Phi()))\n result['DeltaEtaDeltaPhi_jjbbl'] = [[abs(p_jjb.Eta(\n ) - p_bl.Eta()), result['DeltaPhi_jjbbl']]]\n if lepton:\n result['leptonPt'] = lepton.PT\n result['MET'] = MET.MET\n result['DeltaPhi_lMET'] = abs(MET.Phi - lepton.Phi)\n result['MT_lnu'] = recoWlnu2Mt(lepton, MET)\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV))[:3]\n if len(ji) > 0 and p_bb1:\n p_j1l = lepton.TLV + ji[0].TLV\n result['M_j1l'] = p_j1l.M()\n result['Pt_j1l'] = p_j1l.Pt()\n result['DeltaR_j1l'] = TLV.DeltaR(lepton.TLV, ji[0].TLV)\n result['DeltaPhi_j1l'] = fold(abs(lepton.Phi - ji[0].Phi))\n result['DeltaEtaDeltaPhi_j1l'] = [[abs(lepton.Eta - ji[0].\n Eta), result['DeltaPhi_j1l']]]\n result['DeltaR_j1lbb'] = TLV.DeltaR(p_j1l, p_bb1)\n result['DeltaPhi_j1lbb'] = fold(abs(p_j1l.Phi() - p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_j1lbb'] = [[abs(p_j1l.Eta() -\n p_bb1.Eta()), result['DeltaPhi_j1lbb']]]\n if len(ji) > 1:\n result['DeltaR_j2l'] = TLV.DeltaR(lepton.TLV, ji[1].TLV)\n result['DeltaPhi_j2l'] = fold(abs(lepton.Phi - ji[1].Phi))\n result['DeltaEtaDeltaPhi_j2l'] = [[abs(lepton.Eta - ji[\n 1].Eta), result['DeltaPhi_j2l']]]\n if len(ji) > 2:\n result['DeltaEtaDeltaPhi_j3l'] = [[abs(lepton.Eta -\n ji[2].Eta), fold(abs(lepton.Phi - ji[2].Phi))]]\n result['cleanup'] = []\n for var in tree_vars:\n if var in result:\n result['cleanup'].append(result[var])\n else:\n del result['cleanup']\n break\n return result\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n <docstring token>\n <function token>\n <function token>\n\n def process(self, event):\n result = {}\n jets = event.cleanedJets20[:]\n alljets = [j for j in event.jets if j.PT > 20 and abs(j.Eta) < 2.5]\n bjets = event.bjets30[:]\n result['Njets20'] = len(event.cleanedJets20)\n result['Njets30'] = len(event.cleanedJets30)\n result['Nbjets30'] = len(event.bjets30)\n if len(jets) > 3 and len(event.leadingLeptons) == 1 and event.met[0\n ].MET > 20:\n result['Nbjets30_cut_PUPPI'] = len(event.bjets30)\n result['Nbjets30_cut_all'] = len([j for j in alljets if j.BTag and\n j.PT > 30])\n NPU = event.npu[0]\n lepton = None\n p_neutrino = None\n MET = event.met[0]\n if len(event.leadingLeptons):\n lepton = event.leadingLeptons[0]\n p_neutrino = recoNeutrino(lepton.TLV, MET)\n bl = []\n p_bl = None\n if lepton and bjets:\n bl = sorted(bjets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV),\n reverse=True)\n DeltaPhi = fold(abs(lepton.Phi - bl[0].Phi))\n DeltaEta = abs(lepton.Eta - bl[0].Eta)\n p_bl = lepton.TLV + bl[-1].TLV\n result['M_bl'] = p_bl.M()\n result['Pt_bl'] = p_bl.Pt()\n result['DeltaR_b1l'] = TLV.DeltaR(lepton.TLV, bl[0].TLV)\n result['DeltaPhi_b1l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b1l'] = [[DeltaEta, DeltaPhi]]\n if len(bl) > 1:\n DeltaPhi = fold(abs(lepton.Phi - bl[1].Phi))\n DeltaEta = abs(lepton.Eta - bl[1].Eta)\n result['M_bb_farthest'] = (bl[0].TLV + bl[1].TLV).M()\n result['DeltaR_b2l'] = TLV.DeltaR(lepton.TLV, bl[1].TLV)\n result['DeltaPhi_b2l'] = DeltaPhi\n result['DeltaEtaDeltaPhi_b2l'] = [[DeltaEta, DeltaPhi]]\n DeltaR_bb_closest = 1000\n bjet_closest = []\n p_bb1 = None\n for j1, j2 in combinations(bjets, 2):\n p_bb = j1.TLV + j2.TLV\n DeltaR = TLV.DeltaR(j1.TLV, j2.TLV)\n if DeltaR < DeltaR_bb_closest:\n bjet_closest = [j1, j2]\n p_bb1 = p_bb\n result['M_bb_closest'] = p_bb.M()\n result['Pt_bb'] = p_bb.Pt()\n result['DeltaR_bb1'] = TLV.DeltaR(j1.TLV, j2.TLV)\n result['DeltaPhi_bb1'] = fold(abs(j1.Phi - j2.Phi))\n result['DeltaEtaDeltaPhi_bb1'] = [[abs(j1.Eta - j2.Eta),\n result['DeltaPhi_bb1']]]\n DeltaR_bb_closest = DeltaR\n if len(bjets) > 1:\n result['M_bb_leading'] = (bjets[0].TLV + bjets[1].TLV).M()\n for bjet in bjet_closest:\n jets.remove(bjet)\n if len(jets) > 0:\n result['jet1Pt'] = jets[0].PT\n if len(jets) > 1:\n result['jet2Pt'] = jets[1].PT\n if len(bjets) > 1:\n result['bjet1Pt'] = bjet_closest[0].PT\n result['bjet2Pt'] = bjet_closest[1].PT\n elif len(bjets):\n result['bjet1Pt'] = bjets[0].PT\n if len(jets) > 1:\n p_jj = jets[0].TLV + jets[1].TLV\n result['M_jj'] = p_jj.M()\n result['DeltaR_jj'] = TLV.DeltaR(jets[0].TLV, jets[1].TLV)\n result['DeltaPhi_jj'] = fold(abs(jets[0].Phi - jets[1].Phi))\n result['DeltaEtaDeltaPhi_jj'] = [[abs(jets[0].Eta - jets[1].Eta\n ), result['DeltaPhi_jj']]]\n result['M_jj_NPU'] = [[p_jj.M(), NPU.HT]]\n if lepton:\n p_jjl = p_jj + lepton.TLV\n result['M_jjl'] = p_jjl.M()\n result['Pt_jjl'] = p_jjl.Pt()\n result['M_jjlnu'] = (p_jj + lepton.TLV + p_neutrino).M()\n result['DeltaR_jjl'] = TLV.DeltaR(p_jj, lepton.TLV)\n result['DeltaPhi_jjl'] = fold(abs(p_jj.Phi() - lepton.Phi))\n result['DeltaEtaDeltaPhi_jjl'] = [[abs(p_jj.Eta() - lepton.\n Eta), result['DeltaPhi_jjl']]]\n result['DeltaPhi_jjlnu'] = fold(abs(p_jjl.Phi() - MET.Phi))\n result['MT_jjlnu'] = sqrt(2 * MET.MET * p_jjl.Pt() * (1 -\n cos(p_jjl.Phi() - MET.Phi)))\n if len(bl) > 1:\n p_blnu = bl[-2].TLV + lepton.TLV + p_neutrino\n p_b2lnu = bl[-1].TLV + lepton.TLV + p_neutrino\n result['M_b1lnu'] = p_blnu.M()\n result['M_b2lnu'] = p_b2lnu.M()\n result['M_blnu_2D'] = [[result['M_b1lnu'], result[\n 'M_b2lnu']]]\n result['Pt_b1lnu'] = p_blnu.Pt()\n result['Pt_b2lnu'] = p_b2lnu.Pt()\n if len(event.cleanedJets20) > 3:\n jets_tt = event.cleanedJets20[:]\n jets_tt.remove(bl[-1])\n jets_tt.remove(bl[-2])\n p_jj = jets_tt[0].TLV + jets_tt[1].TLV\n p_jjb = p_jj + bl[-2].TLV\n p_jjb2 = p_jj + bl[-1].TLV\n result['M_jjl'] = p_jjl.M()\n result['M_jjb1'] = p_jjb.M()\n result['M_jjb2'] = p_jjb2.M()\n result['M_jjb_2D'] = [[result['M_jjb1'], result[\n 'M_jjb2']]]\n result['Pt_jjb1'] = p_jjb.Pt()\n result['Pt_jjb2'] = p_jjb2.Pt()\n result['DeltaR_jjb'] = TLV.DeltaR(p_jj, bl[-2].TLV)\n result['DeltaPhi_jjb'] = fold(abs(p_jj.Phi() - bl[-\n 2].Phi))\n result['DeltaEtaDeltaPhi_jjb'] = [[abs(p_jj.Eta() -\n bl[-2].Eta), result['DeltaPhi_jjb']]]\n result['DeltaR_jjlbb'] = TLV.DeltaR(p_jjl, p_bb1)\n result['DeltaPhi_jjlbb'] = fold(abs(p_jjl.Phi() -\n p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_jjlbb'] = [[abs(p_jjl.Eta(\n ) - p_bb1.Eta()), result['DeltaPhi_jjlbb']]]\n result['DeltaR_jjbbl'] = TLV.DeltaR(p_jjb, p_bl)\n result['DeltaPhi_jjbbl'] = fold(abs(p_jjb.Phi() -\n p_bl.Phi()))\n result['DeltaEtaDeltaPhi_jjbbl'] = [[abs(p_jjb.Eta(\n ) - p_bl.Eta()), result['DeltaPhi_jjbbl']]]\n if lepton:\n result['leptonPt'] = lepton.PT\n result['MET'] = MET.MET\n result['DeltaPhi_lMET'] = abs(MET.Phi - lepton.Phi)\n result['MT_lnu'] = recoWlnu2Mt(lepton, MET)\n ji = sorted(jets, key=lambda j: TLV.DeltaR(j.TLV, lepton.TLV))[:3]\n if len(ji) > 0 and p_bb1:\n p_j1l = lepton.TLV + ji[0].TLV\n result['M_j1l'] = p_j1l.M()\n result['Pt_j1l'] = p_j1l.Pt()\n result['DeltaR_j1l'] = TLV.DeltaR(lepton.TLV, ji[0].TLV)\n result['DeltaPhi_j1l'] = fold(abs(lepton.Phi - ji[0].Phi))\n result['DeltaEtaDeltaPhi_j1l'] = [[abs(lepton.Eta - ji[0].\n Eta), result['DeltaPhi_j1l']]]\n result['DeltaR_j1lbb'] = TLV.DeltaR(p_j1l, p_bb1)\n result['DeltaPhi_j1lbb'] = fold(abs(p_j1l.Phi() - p_bb1.Phi()))\n result['DeltaEtaDeltaPhi_j1lbb'] = [[abs(p_j1l.Eta() -\n p_bb1.Eta()), result['DeltaPhi_j1lbb']]]\n if len(ji) > 1:\n result['DeltaR_j2l'] = TLV.DeltaR(lepton.TLV, ji[1].TLV)\n result['DeltaPhi_j2l'] = fold(abs(lepton.Phi - ji[1].Phi))\n result['DeltaEtaDeltaPhi_j2l'] = [[abs(lepton.Eta - ji[\n 1].Eta), result['DeltaPhi_j2l']]]\n if len(ji) > 2:\n result['DeltaEtaDeltaPhi_j3l'] = [[abs(lepton.Eta -\n ji[2].Eta), fold(abs(lepton.Phi - ji[2].Phi))]]\n result['cleanup'] = []\n for var in tree_vars:\n if var in result:\n result['cleanup'].append(result[var])\n else:\n del result['cleanup']\n break\n return result\n\n\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass CleanUpControlPlots(BaseControlPlots):\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n\n<code token>\n",
"<import token>\n<assignment token>\n<class token>\n<code token>\n"
] | false |
99,048 |
65ee92fd6ef98331114e8dcab2aee9305282789d
|
# encoding: utf-8
from __future__ import unicode_literals
from django.db import models
class Hobby(models.Model):
name = models.CharField(max_length=50, verbose_name="Hobby name")
def __str__(self):
return self.name
class Meta:
"""
Meta class for Hobbies
"""
verbose_name_plural = 'Hobby'
|
[
"# encoding: utf-8\nfrom __future__ import unicode_literals\nfrom django.db import models\n\nclass Hobby(models.Model):\n\n name = models.CharField(max_length=50, verbose_name=\"Hobby name\")\n \n def __str__(self):\n return self.name\n\n class Meta:\n \"\"\"\n Meta class for Hobbies\n \"\"\"\n verbose_name_plural = 'Hobby'",
"from __future__ import unicode_literals\nfrom django.db import models\n\n\nclass Hobby(models.Model):\n name = models.CharField(max_length=50, verbose_name='Hobby name')\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n \"\"\"\n Meta class for Hobbies\n \"\"\"\n verbose_name_plural = 'Hobby'\n",
"<import token>\n\n\nclass Hobby(models.Model):\n name = models.CharField(max_length=50, verbose_name='Hobby name')\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n \"\"\"\n Meta class for Hobbies\n \"\"\"\n verbose_name_plural = 'Hobby'\n",
"<import token>\n\n\nclass Hobby(models.Model):\n <assignment token>\n\n def __str__(self):\n return self.name\n\n\n class Meta:\n \"\"\"\n Meta class for Hobbies\n \"\"\"\n verbose_name_plural = 'Hobby'\n",
"<import token>\n\n\nclass Hobby(models.Model):\n <assignment token>\n <function token>\n\n\n class Meta:\n \"\"\"\n Meta class for Hobbies\n \"\"\"\n verbose_name_plural = 'Hobby'\n",
"<import token>\n<class token>\n"
] | false |
99,049 |
8011fa08c8da28ad02627e454865236a7cc3e5e1
|
import pandas as pd
import numpy as np
from io import StringIO
from AlgorithmImports import *
class RedditStockSentiment(QCAlgorithm):
def Initialize(self):
self.SetStartDate(2021,3, 1) # Set Start Date
self.SetEndDate(2021, 6, 18) #Set End Date
self.SetCash(100000) # Set Strategy Cash
self.tickers = ["CLNE", "AMC","BB","PLTR","NVDA","TSLA","CLOV","GME","AMD","CLF","UWMC","WKHS","AAPL","AMZN","TLRY","PRPL","SOFI","NIO","DKNG","NNDM","ET","CRSR","ITUB","ASO","BABA","GLD","ARVL","WISH","VIAC","SNDL","GOEV","WOOF","SENS","NET","ME","HUYA","DIS","GOOGL","MSFT","SPCE","TIL","RKT","JPM","EM","APP","LEV","F","SQQQ","TQQQ","CVAC","ARKK","SLV","FB","NOK","OCGN","SQ","XPEV","JD","VZIO","XLF","HYLN","GE","NFLX","ROPE","WEN","FSR","TLT","SPOT","MT","TTD","BA","SI","FUBO","PYPL","WFC","ENPH","BAC","XOM","INTC","PSFE","TAL","ZM","COIN","TRCH","SCR","ROOT","QS","SKLZ","ATOS","GEO","UVXY","SHOP","RBLX","DE","GM","LI","UPS","DASH","ROKU","NKLA","WTI","CHPT","SWBI","FINV","VXRT","OXY","WIT","MX","PLUG","ZNGA","TM","MARA","IDEX","ADBE","ABNB","DDS","WMT","TX","IWM","ASAN","RIOT","MVIS","MNMD","PINS","ARKF","BBY","GUSH","PENN","NNOX","STEM","BYND","LUV","NUE","IOVA","NEE","PS","MRO","OGS","RUN","XLE","FCEL","MCD","UPST","ETSY","JMIA","DIA","BNGO","SDC","EDU","UBER","ZIM","OPEN","MSOS","MOO","NKE","HD","RNG","PATH","WLK","RAIN","FCX","SNAP","CPNG","MAPS","INO","LEN","SOLO","PTON","MU","HSY"]
self.investP = 1/len(self.tickers) #Equal weight portfolio
self.SetWarmup(TimeSpan.FromDays(65))
# self.Settings.RebalancePortfolioOnInsightChanges = False
# self.Settings.RebalancePortfolioOnSecurityChanges = False
# self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel(lambda time: None))
# self.SetPortfolioConstruction(InsightWeightingPortfolioConstructionModel(
# rebalancingParam = timedelta(days = 30),
# portfolioBias = PortfolioBias.Long))
self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel(self.RebalanceFunction))
for stock in self.tickers:
self.AddEquity(stock, Resolution.Daily) #Sets resolution to hour bars
self.AddRiskManagement(TrailingStopRiskManagementModel(0.08)) #Risk management
self.trade = True #OnData will run when the program when the program is first executed
csv = self.Download("https://raw.githubusercontent.com/sommohapatra/reddit_sentiment/main/Reddit_Sentiment_Equity_new.csv") #Downloads data
self.df = pd.read_csv(StringIO(csv)) #Read into a dataframe
self.Schedule.On(self.DateRules.EveryDay(),
self.TimeRules.At(10, 30),
self.runDaily) #Runs runDaily (sets self.trade to True) at 8:30am Chicago time
def RebalanceFunction(self, time):
# for performance only run rebalance logic once a week, monday
if time.weekday() != 0:
return None
def OnData(self, data):
algYear = self.Time.year
algMonth = self.Time.month
algDay = self.Time.day
if(algYear == 2021 and algMonth == 3 and algDay == 2):
self.MarketOrder("PYPL", 36)
def runDaily(self):
self.trade = True
# class PortfolioRebalanceOnCustomFuncRegressionAlgorithm(QCAlgorithm):
# def Initialize(self):
# ''' Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.'''
# self.UniverseSettings.Resolution = Resolution.Daily
# self.SetStartDate(2015, 1, 1)
# self.SetEndDate(2018, 1, 1)
# self.Settings.RebalancePortfolioOnInsightChanges = False;
# self.Settings.RebalancePortfolioOnSecurityChanges = False;
# self.SetUniverseSelection(CustomUniverseSelectionModel("CustomUniverseSelectionModel", lambda time: [ "AAPL", "IBM", "FB", "SPY", "AIG", "BAC", "BNO" ]))
# self.SetAlpha(ConstantAlphaModel(InsightType.Price, InsightDirection.Up, TimeSpan.FromMinutes(20), 0.025, None));
# self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel(self.RebalanceFunction))
# self.SetExecution(ImmediateExecutionModel())
# self.lastRebalanceTime = self.StartDate
# def RebalanceFunction(self, time):
# # for performance only run rebalance logic once a week, monday
# if time.weekday() != 0:
# return None
# if self.lastRebalanceTime == self.StartDate:
# # initial rebalance
# self.lastRebalanceTime = time;
# return time;
# deviation = 0;
# count = sum(1 for security in self.Securities.Values if security.Invested)
# if count > 0:
# self.lastRebalanceTime = time;
# portfolioValuePerSecurity = self.Portfolio.TotalPortfolioValue / count;
# for security in self.Securities.Values:
# if not security.Invested:
# continue
# reservedBuyingPowerForCurrentPosition = (security.BuyingPowerModel.GetReservedBuyingPowerForPosition(
# ReservedBuyingPowerForPositionParameters(security)).AbsoluteUsedBuyingPower
# * security.BuyingPowerModel.GetLeverage(security)) # see GH issue 4107
# # we sum up deviation for each security
# deviation += (portfolioValuePerSecurity - reservedBuyingPowerForCurrentPosition) / portfolioValuePerSecurity;
# # if securities are deviated 1.5% from their theoretical share of TotalPortfolioValue we rebalance
# if deviation >= 0.015:
# return time
# return None
# def OnOrderEvent(self, orderEvent):
# if orderEvent.Status == OrderStatus.Submitted:
# if self.UtcTime != self.lastRebalanceTime or self.UtcTime.weekday() != 0:
# raise ValueError(f"{self.UtcTime} {orderEvent.Symbol}")
|
[
"import pandas as pd\nimport numpy as np\nfrom io import StringIO\nfrom AlgorithmImports import *\n\nclass RedditStockSentiment(QCAlgorithm):\n\n def Initialize(self):\n self.SetStartDate(2021,3, 1) # Set Start Date\n self.SetEndDate(2021, 6, 18) #Set End Date\n self.SetCash(100000) # Set Strategy Cash\n self.tickers = [\"CLNE\", \"AMC\",\"BB\",\"PLTR\",\"NVDA\",\"TSLA\",\"CLOV\",\"GME\",\"AMD\",\"CLF\",\"UWMC\",\"WKHS\",\"AAPL\",\"AMZN\",\"TLRY\",\"PRPL\",\"SOFI\",\"NIO\",\"DKNG\",\"NNDM\",\"ET\",\"CRSR\",\"ITUB\",\"ASO\",\"BABA\",\"GLD\",\"ARVL\",\"WISH\",\"VIAC\",\"SNDL\",\"GOEV\",\"WOOF\",\"SENS\",\"NET\",\"ME\",\"HUYA\",\"DIS\",\"GOOGL\",\"MSFT\",\"SPCE\",\"TIL\",\"RKT\",\"JPM\",\"EM\",\"APP\",\"LEV\",\"F\",\"SQQQ\",\"TQQQ\",\"CVAC\",\"ARKK\",\"SLV\",\"FB\",\"NOK\",\"OCGN\",\"SQ\",\"XPEV\",\"JD\",\"VZIO\",\"XLF\",\"HYLN\",\"GE\",\"NFLX\",\"ROPE\",\"WEN\",\"FSR\",\"TLT\",\"SPOT\",\"MT\",\"TTD\",\"BA\",\"SI\",\"FUBO\",\"PYPL\",\"WFC\",\"ENPH\",\"BAC\",\"XOM\",\"INTC\",\"PSFE\",\"TAL\",\"ZM\",\"COIN\",\"TRCH\",\"SCR\",\"ROOT\",\"QS\",\"SKLZ\",\"ATOS\",\"GEO\",\"UVXY\",\"SHOP\",\"RBLX\",\"DE\",\"GM\",\"LI\",\"UPS\",\"DASH\",\"ROKU\",\"NKLA\",\"WTI\",\"CHPT\",\"SWBI\",\"FINV\",\"VXRT\",\"OXY\",\"WIT\",\"MX\",\"PLUG\",\"ZNGA\",\"TM\",\"MARA\",\"IDEX\",\"ADBE\",\"ABNB\",\"DDS\",\"WMT\",\"TX\",\"IWM\",\"ASAN\",\"RIOT\",\"MVIS\",\"MNMD\",\"PINS\",\"ARKF\",\"BBY\",\"GUSH\",\"PENN\",\"NNOX\",\"STEM\",\"BYND\",\"LUV\",\"NUE\",\"IOVA\",\"NEE\",\"PS\",\"MRO\",\"OGS\",\"RUN\",\"XLE\",\"FCEL\",\"MCD\",\"UPST\",\"ETSY\",\"JMIA\",\"DIA\",\"BNGO\",\"SDC\",\"EDU\",\"UBER\",\"ZIM\",\"OPEN\",\"MSOS\",\"MOO\",\"NKE\",\"HD\",\"RNG\",\"PATH\",\"WLK\",\"RAIN\",\"FCX\",\"SNAP\",\"CPNG\",\"MAPS\",\"INO\",\"LEN\",\"SOLO\",\"PTON\",\"MU\",\"HSY\"]\n self.investP = 1/len(self.tickers) #Equal weight portfolio\n self.SetWarmup(TimeSpan.FromDays(65))\n\n # self.Settings.RebalancePortfolioOnInsightChanges = False\n # self.Settings.RebalancePortfolioOnSecurityChanges = False\n # self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel(lambda time: None))\n # self.SetPortfolioConstruction(InsightWeightingPortfolioConstructionModel(\n # rebalancingParam = timedelta(days = 30), \n # portfolioBias = PortfolioBias.Long))\n self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel(self.RebalanceFunction))\n\n for stock in self.tickers:\n self.AddEquity(stock, Resolution.Daily) #Sets resolution to hour bars\n \n self.AddRiskManagement(TrailingStopRiskManagementModel(0.08)) #Risk management\n \n self.trade = True #OnData will run when the program when the program is first executed\n \n csv = self.Download(\"https://raw.githubusercontent.com/sommohapatra/reddit_sentiment/main/Reddit_Sentiment_Equity_new.csv\") #Downloads data\n self.df = pd.read_csv(StringIO(csv)) #Read into a dataframe\n \n self.Schedule.On(self.DateRules.EveryDay(), \n self.TimeRules.At(10, 30), \n self.runDaily) #Runs runDaily (sets self.trade to True) at 8:30am Chicago time\n\n def RebalanceFunction(self, time):\n # for performance only run rebalance logic once a week, monday\n if time.weekday() != 0:\n return None\n\n def OnData(self, data):\n algYear = self.Time.year\n algMonth = self.Time.month\n algDay = self.Time.day\n if(algYear == 2021 and algMonth == 3 and algDay == 2):\n self.MarketOrder(\"PYPL\", 36)\n\n\n def runDaily(self):\n self.trade = True\n\n# class PortfolioRebalanceOnCustomFuncRegressionAlgorithm(QCAlgorithm):\n# def Initialize(self):\n# ''' Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.'''\n\n# self.UniverseSettings.Resolution = Resolution.Daily\n\n# self.SetStartDate(2015, 1, 1)\n# self.SetEndDate(2018, 1, 1)\n\n# self.Settings.RebalancePortfolioOnInsightChanges = False;\n# self.Settings.RebalancePortfolioOnSecurityChanges = False;\n\n# self.SetUniverseSelection(CustomUniverseSelectionModel(\"CustomUniverseSelectionModel\", lambda time: [ \"AAPL\", \"IBM\", \"FB\", \"SPY\", \"AIG\", \"BAC\", \"BNO\" ]))\n# self.SetAlpha(ConstantAlphaModel(InsightType.Price, InsightDirection.Up, TimeSpan.FromMinutes(20), 0.025, None));\n# self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel(self.RebalanceFunction))\n# self.SetExecution(ImmediateExecutionModel())\n# self.lastRebalanceTime = self.StartDate\n\n# def RebalanceFunction(self, time):\n# # for performance only run rebalance logic once a week, monday\n# if time.weekday() != 0:\n# return None\n\n# if self.lastRebalanceTime == self.StartDate:\n# # initial rebalance\n# self.lastRebalanceTime = time;\n# return time;\n\n# deviation = 0;\n# count = sum(1 for security in self.Securities.Values if security.Invested)\n# if count > 0:\n# self.lastRebalanceTime = time;\n# portfolioValuePerSecurity = self.Portfolio.TotalPortfolioValue / count;\n# for security in self.Securities.Values:\n# if not security.Invested:\n# continue\n# reservedBuyingPowerForCurrentPosition = (security.BuyingPowerModel.GetReservedBuyingPowerForPosition(\n# ReservedBuyingPowerForPositionParameters(security)).AbsoluteUsedBuyingPower\n# * security.BuyingPowerModel.GetLeverage(security)) # see GH issue 4107\n# # we sum up deviation for each security\n# deviation += (portfolioValuePerSecurity - reservedBuyingPowerForCurrentPosition) / portfolioValuePerSecurity;\n\n# # if securities are deviated 1.5% from their theoretical share of TotalPortfolioValue we rebalance\n# if deviation >= 0.015:\n# return time\n# return None\n\n# def OnOrderEvent(self, orderEvent):\n# if orderEvent.Status == OrderStatus.Submitted:\n# if self.UtcTime != self.lastRebalanceTime or self.UtcTime.weekday() != 0:\n# raise ValueError(f\"{self.UtcTime} {orderEvent.Symbol}\")\n",
"import pandas as pd\nimport numpy as np\nfrom io import StringIO\nfrom AlgorithmImports import *\n\n\nclass RedditStockSentiment(QCAlgorithm):\n\n def Initialize(self):\n self.SetStartDate(2021, 3, 1)\n self.SetEndDate(2021, 6, 18)\n self.SetCash(100000)\n self.tickers = ['CLNE', 'AMC', 'BB', 'PLTR', 'NVDA', 'TSLA', 'CLOV',\n 'GME', 'AMD', 'CLF', 'UWMC', 'WKHS', 'AAPL', 'AMZN', 'TLRY',\n 'PRPL', 'SOFI', 'NIO', 'DKNG', 'NNDM', 'ET', 'CRSR', 'ITUB',\n 'ASO', 'BABA', 'GLD', 'ARVL', 'WISH', 'VIAC', 'SNDL', 'GOEV',\n 'WOOF', 'SENS', 'NET', 'ME', 'HUYA', 'DIS', 'GOOGL', 'MSFT',\n 'SPCE', 'TIL', 'RKT', 'JPM', 'EM', 'APP', 'LEV', 'F', 'SQQQ',\n 'TQQQ', 'CVAC', 'ARKK', 'SLV', 'FB', 'NOK', 'OCGN', 'SQ',\n 'XPEV', 'JD', 'VZIO', 'XLF', 'HYLN', 'GE', 'NFLX', 'ROPE',\n 'WEN', 'FSR', 'TLT', 'SPOT', 'MT', 'TTD', 'BA', 'SI', 'FUBO',\n 'PYPL', 'WFC', 'ENPH', 'BAC', 'XOM', 'INTC', 'PSFE', 'TAL',\n 'ZM', 'COIN', 'TRCH', 'SCR', 'ROOT', 'QS', 'SKLZ', 'ATOS',\n 'GEO', 'UVXY', 'SHOP', 'RBLX', 'DE', 'GM', 'LI', 'UPS', 'DASH',\n 'ROKU', 'NKLA', 'WTI', 'CHPT', 'SWBI', 'FINV', 'VXRT', 'OXY',\n 'WIT', 'MX', 'PLUG', 'ZNGA', 'TM', 'MARA', 'IDEX', 'ADBE',\n 'ABNB', 'DDS', 'WMT', 'TX', 'IWM', 'ASAN', 'RIOT', 'MVIS',\n 'MNMD', 'PINS', 'ARKF', 'BBY', 'GUSH', 'PENN', 'NNOX', 'STEM',\n 'BYND', 'LUV', 'NUE', 'IOVA', 'NEE', 'PS', 'MRO', 'OGS', 'RUN',\n 'XLE', 'FCEL', 'MCD', 'UPST', 'ETSY', 'JMIA', 'DIA', 'BNGO',\n 'SDC', 'EDU', 'UBER', 'ZIM', 'OPEN', 'MSOS', 'MOO', 'NKE', 'HD',\n 'RNG', 'PATH', 'WLK', 'RAIN', 'FCX', 'SNAP', 'CPNG', 'MAPS',\n 'INO', 'LEN', 'SOLO', 'PTON', 'MU', 'HSY']\n self.investP = 1 / len(self.tickers)\n self.SetWarmup(TimeSpan.FromDays(65))\n self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel\n (self.RebalanceFunction))\n for stock in self.tickers:\n self.AddEquity(stock, Resolution.Daily)\n self.AddRiskManagement(TrailingStopRiskManagementModel(0.08))\n self.trade = True\n csv = self.Download(\n 'https://raw.githubusercontent.com/sommohapatra/reddit_sentiment/main/Reddit_Sentiment_Equity_new.csv'\n )\n self.df = pd.read_csv(StringIO(csv))\n self.Schedule.On(self.DateRules.EveryDay(), self.TimeRules.At(10, \n 30), self.runDaily)\n\n def RebalanceFunction(self, time):\n if time.weekday() != 0:\n return None\n\n def OnData(self, data):\n algYear = self.Time.year\n algMonth = self.Time.month\n algDay = self.Time.day\n if algYear == 2021 and algMonth == 3 and algDay == 2:\n self.MarketOrder('PYPL', 36)\n\n def runDaily(self):\n self.trade = True\n",
"<import token>\n\n\nclass RedditStockSentiment(QCAlgorithm):\n\n def Initialize(self):\n self.SetStartDate(2021, 3, 1)\n self.SetEndDate(2021, 6, 18)\n self.SetCash(100000)\n self.tickers = ['CLNE', 'AMC', 'BB', 'PLTR', 'NVDA', 'TSLA', 'CLOV',\n 'GME', 'AMD', 'CLF', 'UWMC', 'WKHS', 'AAPL', 'AMZN', 'TLRY',\n 'PRPL', 'SOFI', 'NIO', 'DKNG', 'NNDM', 'ET', 'CRSR', 'ITUB',\n 'ASO', 'BABA', 'GLD', 'ARVL', 'WISH', 'VIAC', 'SNDL', 'GOEV',\n 'WOOF', 'SENS', 'NET', 'ME', 'HUYA', 'DIS', 'GOOGL', 'MSFT',\n 'SPCE', 'TIL', 'RKT', 'JPM', 'EM', 'APP', 'LEV', 'F', 'SQQQ',\n 'TQQQ', 'CVAC', 'ARKK', 'SLV', 'FB', 'NOK', 'OCGN', 'SQ',\n 'XPEV', 'JD', 'VZIO', 'XLF', 'HYLN', 'GE', 'NFLX', 'ROPE',\n 'WEN', 'FSR', 'TLT', 'SPOT', 'MT', 'TTD', 'BA', 'SI', 'FUBO',\n 'PYPL', 'WFC', 'ENPH', 'BAC', 'XOM', 'INTC', 'PSFE', 'TAL',\n 'ZM', 'COIN', 'TRCH', 'SCR', 'ROOT', 'QS', 'SKLZ', 'ATOS',\n 'GEO', 'UVXY', 'SHOP', 'RBLX', 'DE', 'GM', 'LI', 'UPS', 'DASH',\n 'ROKU', 'NKLA', 'WTI', 'CHPT', 'SWBI', 'FINV', 'VXRT', 'OXY',\n 'WIT', 'MX', 'PLUG', 'ZNGA', 'TM', 'MARA', 'IDEX', 'ADBE',\n 'ABNB', 'DDS', 'WMT', 'TX', 'IWM', 'ASAN', 'RIOT', 'MVIS',\n 'MNMD', 'PINS', 'ARKF', 'BBY', 'GUSH', 'PENN', 'NNOX', 'STEM',\n 'BYND', 'LUV', 'NUE', 'IOVA', 'NEE', 'PS', 'MRO', 'OGS', 'RUN',\n 'XLE', 'FCEL', 'MCD', 'UPST', 'ETSY', 'JMIA', 'DIA', 'BNGO',\n 'SDC', 'EDU', 'UBER', 'ZIM', 'OPEN', 'MSOS', 'MOO', 'NKE', 'HD',\n 'RNG', 'PATH', 'WLK', 'RAIN', 'FCX', 'SNAP', 'CPNG', 'MAPS',\n 'INO', 'LEN', 'SOLO', 'PTON', 'MU', 'HSY']\n self.investP = 1 / len(self.tickers)\n self.SetWarmup(TimeSpan.FromDays(65))\n self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel\n (self.RebalanceFunction))\n for stock in self.tickers:\n self.AddEquity(stock, Resolution.Daily)\n self.AddRiskManagement(TrailingStopRiskManagementModel(0.08))\n self.trade = True\n csv = self.Download(\n 'https://raw.githubusercontent.com/sommohapatra/reddit_sentiment/main/Reddit_Sentiment_Equity_new.csv'\n )\n self.df = pd.read_csv(StringIO(csv))\n self.Schedule.On(self.DateRules.EveryDay(), self.TimeRules.At(10, \n 30), self.runDaily)\n\n def RebalanceFunction(self, time):\n if time.weekday() != 0:\n return None\n\n def OnData(self, data):\n algYear = self.Time.year\n algMonth = self.Time.month\n algDay = self.Time.day\n if algYear == 2021 and algMonth == 3 and algDay == 2:\n self.MarketOrder('PYPL', 36)\n\n def runDaily(self):\n self.trade = True\n",
"<import token>\n\n\nclass RedditStockSentiment(QCAlgorithm):\n\n def Initialize(self):\n self.SetStartDate(2021, 3, 1)\n self.SetEndDate(2021, 6, 18)\n self.SetCash(100000)\n self.tickers = ['CLNE', 'AMC', 'BB', 'PLTR', 'NVDA', 'TSLA', 'CLOV',\n 'GME', 'AMD', 'CLF', 'UWMC', 'WKHS', 'AAPL', 'AMZN', 'TLRY',\n 'PRPL', 'SOFI', 'NIO', 'DKNG', 'NNDM', 'ET', 'CRSR', 'ITUB',\n 'ASO', 'BABA', 'GLD', 'ARVL', 'WISH', 'VIAC', 'SNDL', 'GOEV',\n 'WOOF', 'SENS', 'NET', 'ME', 'HUYA', 'DIS', 'GOOGL', 'MSFT',\n 'SPCE', 'TIL', 'RKT', 'JPM', 'EM', 'APP', 'LEV', 'F', 'SQQQ',\n 'TQQQ', 'CVAC', 'ARKK', 'SLV', 'FB', 'NOK', 'OCGN', 'SQ',\n 'XPEV', 'JD', 'VZIO', 'XLF', 'HYLN', 'GE', 'NFLX', 'ROPE',\n 'WEN', 'FSR', 'TLT', 'SPOT', 'MT', 'TTD', 'BA', 'SI', 'FUBO',\n 'PYPL', 'WFC', 'ENPH', 'BAC', 'XOM', 'INTC', 'PSFE', 'TAL',\n 'ZM', 'COIN', 'TRCH', 'SCR', 'ROOT', 'QS', 'SKLZ', 'ATOS',\n 'GEO', 'UVXY', 'SHOP', 'RBLX', 'DE', 'GM', 'LI', 'UPS', 'DASH',\n 'ROKU', 'NKLA', 'WTI', 'CHPT', 'SWBI', 'FINV', 'VXRT', 'OXY',\n 'WIT', 'MX', 'PLUG', 'ZNGA', 'TM', 'MARA', 'IDEX', 'ADBE',\n 'ABNB', 'DDS', 'WMT', 'TX', 'IWM', 'ASAN', 'RIOT', 'MVIS',\n 'MNMD', 'PINS', 'ARKF', 'BBY', 'GUSH', 'PENN', 'NNOX', 'STEM',\n 'BYND', 'LUV', 'NUE', 'IOVA', 'NEE', 'PS', 'MRO', 'OGS', 'RUN',\n 'XLE', 'FCEL', 'MCD', 'UPST', 'ETSY', 'JMIA', 'DIA', 'BNGO',\n 'SDC', 'EDU', 'UBER', 'ZIM', 'OPEN', 'MSOS', 'MOO', 'NKE', 'HD',\n 'RNG', 'PATH', 'WLK', 'RAIN', 'FCX', 'SNAP', 'CPNG', 'MAPS',\n 'INO', 'LEN', 'SOLO', 'PTON', 'MU', 'HSY']\n self.investP = 1 / len(self.tickers)\n self.SetWarmup(TimeSpan.FromDays(65))\n self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel\n (self.RebalanceFunction))\n for stock in self.tickers:\n self.AddEquity(stock, Resolution.Daily)\n self.AddRiskManagement(TrailingStopRiskManagementModel(0.08))\n self.trade = True\n csv = self.Download(\n 'https://raw.githubusercontent.com/sommohapatra/reddit_sentiment/main/Reddit_Sentiment_Equity_new.csv'\n )\n self.df = pd.read_csv(StringIO(csv))\n self.Schedule.On(self.DateRules.EveryDay(), self.TimeRules.At(10, \n 30), self.runDaily)\n\n def RebalanceFunction(self, time):\n if time.weekday() != 0:\n return None\n <function token>\n\n def runDaily(self):\n self.trade = True\n",
"<import token>\n\n\nclass RedditStockSentiment(QCAlgorithm):\n\n def Initialize(self):\n self.SetStartDate(2021, 3, 1)\n self.SetEndDate(2021, 6, 18)\n self.SetCash(100000)\n self.tickers = ['CLNE', 'AMC', 'BB', 'PLTR', 'NVDA', 'TSLA', 'CLOV',\n 'GME', 'AMD', 'CLF', 'UWMC', 'WKHS', 'AAPL', 'AMZN', 'TLRY',\n 'PRPL', 'SOFI', 'NIO', 'DKNG', 'NNDM', 'ET', 'CRSR', 'ITUB',\n 'ASO', 'BABA', 'GLD', 'ARVL', 'WISH', 'VIAC', 'SNDL', 'GOEV',\n 'WOOF', 'SENS', 'NET', 'ME', 'HUYA', 'DIS', 'GOOGL', 'MSFT',\n 'SPCE', 'TIL', 'RKT', 'JPM', 'EM', 'APP', 'LEV', 'F', 'SQQQ',\n 'TQQQ', 'CVAC', 'ARKK', 'SLV', 'FB', 'NOK', 'OCGN', 'SQ',\n 'XPEV', 'JD', 'VZIO', 'XLF', 'HYLN', 'GE', 'NFLX', 'ROPE',\n 'WEN', 'FSR', 'TLT', 'SPOT', 'MT', 'TTD', 'BA', 'SI', 'FUBO',\n 'PYPL', 'WFC', 'ENPH', 'BAC', 'XOM', 'INTC', 'PSFE', 'TAL',\n 'ZM', 'COIN', 'TRCH', 'SCR', 'ROOT', 'QS', 'SKLZ', 'ATOS',\n 'GEO', 'UVXY', 'SHOP', 'RBLX', 'DE', 'GM', 'LI', 'UPS', 'DASH',\n 'ROKU', 'NKLA', 'WTI', 'CHPT', 'SWBI', 'FINV', 'VXRT', 'OXY',\n 'WIT', 'MX', 'PLUG', 'ZNGA', 'TM', 'MARA', 'IDEX', 'ADBE',\n 'ABNB', 'DDS', 'WMT', 'TX', 'IWM', 'ASAN', 'RIOT', 'MVIS',\n 'MNMD', 'PINS', 'ARKF', 'BBY', 'GUSH', 'PENN', 'NNOX', 'STEM',\n 'BYND', 'LUV', 'NUE', 'IOVA', 'NEE', 'PS', 'MRO', 'OGS', 'RUN',\n 'XLE', 'FCEL', 'MCD', 'UPST', 'ETSY', 'JMIA', 'DIA', 'BNGO',\n 'SDC', 'EDU', 'UBER', 'ZIM', 'OPEN', 'MSOS', 'MOO', 'NKE', 'HD',\n 'RNG', 'PATH', 'WLK', 'RAIN', 'FCX', 'SNAP', 'CPNG', 'MAPS',\n 'INO', 'LEN', 'SOLO', 'PTON', 'MU', 'HSY']\n self.investP = 1 / len(self.tickers)\n self.SetWarmup(TimeSpan.FromDays(65))\n self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel\n (self.RebalanceFunction))\n for stock in self.tickers:\n self.AddEquity(stock, Resolution.Daily)\n self.AddRiskManagement(TrailingStopRiskManagementModel(0.08))\n self.trade = True\n csv = self.Download(\n 'https://raw.githubusercontent.com/sommohapatra/reddit_sentiment/main/Reddit_Sentiment_Equity_new.csv'\n )\n self.df = pd.read_csv(StringIO(csv))\n self.Schedule.On(self.DateRules.EveryDay(), self.TimeRules.At(10, \n 30), self.runDaily)\n\n def RebalanceFunction(self, time):\n if time.weekday() != 0:\n return None\n <function token>\n <function token>\n",
"<import token>\n\n\nclass RedditStockSentiment(QCAlgorithm):\n <function token>\n\n def RebalanceFunction(self, time):\n if time.weekday() != 0:\n return None\n <function token>\n <function token>\n",
"<import token>\n\n\nclass RedditStockSentiment(QCAlgorithm):\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,050 |
c05aaf1e67420cfd76b4a0b2a7543a15ebf0a329
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
from elixir import *
from security import Security
import messages
import gettext
import locale
from entity import User
from entity import Account
import service
from datetime import datetime,timedelta
security = Security()
metadata.bind = 'sqlite:///accounts.sqlite'
metadata.bind.encoding = 'utf-8'
metadata.bind.echo = False
setup_all()
create_all()
commit = session.commit
class UserService:
def add(self,name,email,password):
user = User(name=name,email=email,password=security.password_hash(password))
commit()
def get_user(self,email):
return User.query.filter(User.email==email).first()
def get_accounts(self,user):
return user.accounts
def update_password(self,user,old_password,new_password):
accounts = user.accounts
for account in accounts:
name = security.decrypt(old_password,account.name)
account.name = security.encrypt(new_password,name)
title = security.decrypt(old_password,account.title)
account.title = security.encrypt(new_password,title)
login = security.decrypt(old_password,account.login)
account.login = security.encrypt(new_password,login)
password = security.decrypt(old_password,account.password)
account.password = security.encrypt(new_password,password)
site = security.decrypt(old_password,account.site)
account.site = security.encrypt(new_password,site)
description = security.decrypt(old_password,account.description)
account.description = security.encrypt(new_password,description)
user.password = security.password_hash(new_password)
commit()
def update_email(self,user,email):
user.email = email
commit()
def update_name(self,user,name):
user.name = name
commit()
class AccountService:
def add(self,name,title,login,password,site,description,user_password,user):
name = security.encrypt(user_password,name)
title = security.encrypt(user_password,title)
login = security.encrypt(user_password,login)
password = security.encrypt(user_password,password)
site = security.encrypt(user_password,site)
description = security.encrypt(user_password,description)
account = Account(name=name,title=title,login=login,password=password,site=site,description=description,user=user)
commit()
def get_account(self,user_password,name,user):
accounts = user.accounts
for account in accounts:
if security.decrypt(user_password,account.name) == name:
return account
return None
def find_account(self,user_password,default,user):
accounts = user.accounts
found_accounts = []
default = default.lower()
for account in accounts:
if (security.decrypt(user_password,account.name).lower().find(default) != -1):
found_accounts.append(account)
continue
if (security.decrypt(user_password,account.title).lower().find(default) != -1):
found_accounts.append(account)
continue
if (security.decrypt(user_password,account.login).lower().find(default) != -1):
found_accounts.append(account)
continue
if (security.decrypt(user_password,account.site).lower().find(default) != -1):
found_accounts.append(account)
continue
if (security.decrypt(user_password,account.description).lower().find(default) != -1):
found_accounts.append(account)
continue
return found_accounts
def find_account_custom(self,user_password,name,title,login,site,description,user):
accounts = user.accounts
found_accounts = []
for account in accounts:
if (name is not None and security.decrypt(user_password,account.name).lower().find(name.lower()) != -1):
found_accounts.append(account)
continue
if (title is not None and security.decrypt(user_password,account.title).lower().find(title.lower()) != -1):
found_accounts.append(account)
continue
if (login is not None and security.decrypt(user_password,account.login).lower().find(login.lower()) != -1):
found_accounts.append(account)
continue
if (site is not None and security.decrypt(user_password,account.site).lower().find(site.lower()) != -1):
found_accounts.append(account)
continue
if (description is not None and security.decrypt(user_password,account.description).lower().find(description.lower()) != -1):
found_accounts.append(account)
continue
return found_accounts
def delete_account(self,account):
account.delete()
commit()
def update(self,name,title,login,password,site,description,user_password,account):
if name is not None:
account.name = security.encrypt(user_password,name)
if title is not None:
account.title = security.encrypt(user_password,title)
if login is not None:
account.login = security.encrypt(user_password,login)
if password is not None:
account.password = security.encrypt(user_password,password)
if site is not None:
account.site = security.encrypt(user_password,site)
if description is not None:
account.description - security.encrypt(user_password,description)
commit()
class AuthenticationService:
def __init__(self):
self.user = None
self.message_error = None
self.time_login = None
self.logged = False
self.typed_password = None
self.time_session = 1
def authenticate(self,email,password):
userService = UserService()
self.user = userService.get_user(email)
if self.user is None:
self.message_error = messages.authentication_email_error
return False
if security.password_matches(password,self.user.password):
self.time_login = datetime.now()
self.logged = True
self.typed_password = password
return True
else:
self.message_error = messages.authentication_password_error
return False
def get_login(self):
return self.user.login
def password_is_right(self,password):
return security.password_matches(password,self.user.password)
def logout(self):
self.user = None
self.message_error = None
self.time_login = None
self.logged = False
self.typed_password = None
def session_is_expired(self):
if datetime.now()-timedelta(minutes=self.time_session) >= self.time_login:
return True
else:
return False
def info_session(self):
begin = self.time_login
end = self.time_login + timedelta(minutes=self.time_session)
return (begin,end)
|
[
"#! /usr/bin/python\n# -*- coding: utf-8 -*- \n\nfrom elixir import *\nfrom security import Security\nimport messages\nimport gettext \nimport locale\nfrom entity import User\nfrom entity import Account\nimport service\nfrom datetime import datetime,timedelta\n\nsecurity = Security()\nmetadata.bind = 'sqlite:///accounts.sqlite'\nmetadata.bind.encoding = 'utf-8'\nmetadata.bind.echo = False\nsetup_all()\ncreate_all()\ncommit = session.commit\n\nclass UserService:\n \n def add(self,name,email,password):\n user = User(name=name,email=email,password=security.password_hash(password))\n commit()\n \n def get_user(self,email):\n return User.query.filter(User.email==email).first()\n \n def get_accounts(self,user):\n return user.accounts\n\n def update_password(self,user,old_password,new_password):\n accounts = user.accounts\n for account in accounts:\n name = security.decrypt(old_password,account.name)\n account.name = security.encrypt(new_password,name)\n\n title = security.decrypt(old_password,account.title)\n account.title = security.encrypt(new_password,title)\n\n login = security.decrypt(old_password,account.login)\n account.login = security.encrypt(new_password,login)\n\n password = security.decrypt(old_password,account.password)\n account.password = security.encrypt(new_password,password)\n\n site = security.decrypt(old_password,account.site)\n account.site = security.encrypt(new_password,site)\n\n description = security.decrypt(old_password,account.description)\n account.description = security.encrypt(new_password,description)\n user.password = security.password_hash(new_password)\n commit()\n \n def update_email(self,user,email):\n user.email = email\n commit()\n\n def update_name(self,user,name):\n user.name = name\n commit()\n\nclass AccountService:\n \n def add(self,name,title,login,password,site,description,user_password,user):\n name = security.encrypt(user_password,name)\n title = security.encrypt(user_password,title)\n login = security.encrypt(user_password,login)\n password = security.encrypt(user_password,password)\n site = security.encrypt(user_password,site)\n description = security.encrypt(user_password,description)\n account = Account(name=name,title=title,login=login,password=password,site=site,description=description,user=user)\n commit()\n \n def get_account(self,user_password,name,user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password,account.name) == name:\n return account\n return None\n \n def find_account(self,user_password,default,user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if (security.decrypt(user_password,account.name).lower().find(default) != -1):\n found_accounts.append(account)\n continue\n if (security.decrypt(user_password,account.title).lower().find(default) != -1):\n found_accounts.append(account)\n continue\n if (security.decrypt(user_password,account.login).lower().find(default) != -1):\n found_accounts.append(account)\n continue\n if (security.decrypt(user_password,account.site).lower().find(default) != -1):\n found_accounts.append(account)\n continue\n if (security.decrypt(user_password,account.description).lower().find(default) != -1):\n found_accounts.append(account)\n continue\n return found_accounts\n \n def find_account_custom(self,user_password,name,title,login,site,description,user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if (name is not None and security.decrypt(user_password,account.name).lower().find(name.lower()) != -1):\n found_accounts.append(account)\n continue\n if (title is not None and security.decrypt(user_password,account.title).lower().find(title.lower()) != -1):\n found_accounts.append(account)\n continue\n if (login is not None and security.decrypt(user_password,account.login).lower().find(login.lower()) != -1):\n found_accounts.append(account)\n continue\n if (site is not None and security.decrypt(user_password,account.site).lower().find(site.lower()) != -1):\n found_accounts.append(account)\n continue\n if (description is not None and security.decrypt(user_password,account.description).lower().find(description.lower()) != -1):\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self,account):\n account.delete()\n commit()\n\n def update(self,name,title,login,password,site,description,user_password,account):\n if name is not None:\n account.name = security.encrypt(user_password,name)\n if title is not None:\n account.title = security.encrypt(user_password,title)\n if login is not None:\n account.login = security.encrypt(user_password,login)\n if password is not None:\n account.password = security.encrypt(user_password,password)\n if site is not None:\n account.site = security.encrypt(user_password,site)\n if description is not None:\n account.description - security.encrypt(user_password,description)\n commit()\n\n \nclass AuthenticationService:\n \n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n \n def authenticate(self,email,password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password,self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n \n def get_login(self):\n return self.user.login\n\n def password_is_right(self,password):\n return security.password_matches(password,self.user.password) \n \n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n \n def session_is_expired(self):\n if datetime.now()-timedelta(minutes=self.time_session) >= self.time_login:\n return True\n else:\n return False\n \n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return (begin,end)\n \n \n \n \n",
"from elixir import *\nfrom security import Security\nimport messages\nimport gettext\nimport locale\nfrom entity import User\nfrom entity import Account\nimport service\nfrom datetime import datetime, timedelta\nsecurity = Security()\nmetadata.bind = 'sqlite:///accounts.sqlite'\nmetadata.bind.encoding = 'utf-8'\nmetadata.bind.echo = False\nsetup_all()\ncreate_all()\ncommit = session.commit\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n\n def get_user(self, email):\n return User.query.filter(User.email == email).first()\n\n def get_accounts(self, user):\n return user.accounts\n\n def update_password(self, user, old_password, new_password):\n accounts = user.accounts\n for account in accounts:\n name = security.decrypt(old_password, account.name)\n account.name = security.encrypt(new_password, name)\n title = security.decrypt(old_password, account.title)\n account.title = security.encrypt(new_password, title)\n login = security.decrypt(old_password, account.login)\n account.login = security.encrypt(new_password, login)\n password = security.decrypt(old_password, account.password)\n account.password = security.encrypt(new_password, password)\n site = security.decrypt(old_password, account.site)\n account.site = security.encrypt(new_password, site)\n description = security.decrypt(old_password, account.description)\n account.description = security.encrypt(new_password, description)\n user.password = security.password_hash(new_password)\n commit()\n\n def update_email(self, user, email):\n user.email = email\n commit()\n\n def update_name(self, user, name):\n user.name = name\n commit()\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\nsecurity = Security()\nmetadata.bind = 'sqlite:///accounts.sqlite'\nmetadata.bind.encoding = 'utf-8'\nmetadata.bind.echo = False\nsetup_all()\ncreate_all()\ncommit = session.commit\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n\n def get_user(self, email):\n return User.query.filter(User.email == email).first()\n\n def get_accounts(self, user):\n return user.accounts\n\n def update_password(self, user, old_password, new_password):\n accounts = user.accounts\n for account in accounts:\n name = security.decrypt(old_password, account.name)\n account.name = security.encrypt(new_password, name)\n title = security.decrypt(old_password, account.title)\n account.title = security.encrypt(new_password, title)\n login = security.decrypt(old_password, account.login)\n account.login = security.encrypt(new_password, login)\n password = security.decrypt(old_password, account.password)\n account.password = security.encrypt(new_password, password)\n site = security.decrypt(old_password, account.site)\n account.site = security.encrypt(new_password, site)\n description = security.decrypt(old_password, account.description)\n account.description = security.encrypt(new_password, description)\n user.password = security.password_hash(new_password)\n commit()\n\n def update_email(self, user, email):\n user.email = email\n commit()\n\n def update_name(self, user, name):\n user.name = name\n commit()\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\nsetup_all()\ncreate_all()\n<assignment token>\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n\n def get_user(self, email):\n return User.query.filter(User.email == email).first()\n\n def get_accounts(self, user):\n return user.accounts\n\n def update_password(self, user, old_password, new_password):\n accounts = user.accounts\n for account in accounts:\n name = security.decrypt(old_password, account.name)\n account.name = security.encrypt(new_password, name)\n title = security.decrypt(old_password, account.title)\n account.title = security.encrypt(new_password, title)\n login = security.decrypt(old_password, account.login)\n account.login = security.encrypt(new_password, login)\n password = security.decrypt(old_password, account.password)\n account.password = security.encrypt(new_password, password)\n site = security.decrypt(old_password, account.site)\n account.site = security.encrypt(new_password, site)\n description = security.decrypt(old_password, account.description)\n account.description = security.encrypt(new_password, description)\n user.password = security.password_hash(new_password)\n commit()\n\n def update_email(self, user, email):\n user.email = email\n commit()\n\n def update_name(self, user, name):\n user.name = name\n commit()\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n\n def get_user(self, email):\n return User.query.filter(User.email == email).first()\n\n def get_accounts(self, user):\n return user.accounts\n\n def update_password(self, user, old_password, new_password):\n accounts = user.accounts\n for account in accounts:\n name = security.decrypt(old_password, account.name)\n account.name = security.encrypt(new_password, name)\n title = security.decrypt(old_password, account.title)\n account.title = security.encrypt(new_password, title)\n login = security.decrypt(old_password, account.login)\n account.login = security.encrypt(new_password, login)\n password = security.decrypt(old_password, account.password)\n account.password = security.encrypt(new_password, password)\n site = security.decrypt(old_password, account.site)\n account.site = security.encrypt(new_password, site)\n description = security.decrypt(old_password, account.description)\n account.description = security.encrypt(new_password, description)\n user.password = security.password_hash(new_password)\n commit()\n\n def update_email(self, user, email):\n user.email = email\n commit()\n\n def update_name(self, user, name):\n user.name = name\n commit()\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n <function token>\n\n def get_accounts(self, user):\n return user.accounts\n\n def update_password(self, user, old_password, new_password):\n accounts = user.accounts\n for account in accounts:\n name = security.decrypt(old_password, account.name)\n account.name = security.encrypt(new_password, name)\n title = security.decrypt(old_password, account.title)\n account.title = security.encrypt(new_password, title)\n login = security.decrypt(old_password, account.login)\n account.login = security.encrypt(new_password, login)\n password = security.decrypt(old_password, account.password)\n account.password = security.encrypt(new_password, password)\n site = security.decrypt(old_password, account.site)\n account.site = security.encrypt(new_password, site)\n description = security.decrypt(old_password, account.description)\n account.description = security.encrypt(new_password, description)\n user.password = security.password_hash(new_password)\n commit()\n\n def update_email(self, user, email):\n user.email = email\n commit()\n\n def update_name(self, user, name):\n user.name = name\n commit()\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n <function token>\n\n def get_accounts(self, user):\n return user.accounts\n <function token>\n\n def update_email(self, user, email):\n user.email = email\n commit()\n\n def update_name(self, user, name):\n user.name = name\n commit()\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n <function token>\n\n def get_accounts(self, user):\n return user.accounts\n <function token>\n\n def update_email(self, user, email):\n user.email = email\n commit()\n <function token>\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass UserService:\n\n def add(self, name, email, password):\n user = User(name=name, email=email, password=security.password_hash\n (password))\n commit()\n <function token>\n <function token>\n <function token>\n\n def update_email(self, user, email):\n user.email = email\n commit()\n <function token>\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass UserService:\n <function token>\n <function token>\n <function token>\n <function token>\n\n def update_email(self, user, email):\n user.email = email\n commit()\n <function token>\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\nclass UserService:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n\n\nclass AccountService:\n\n def add(self, name, title, login, password, site, description,\n user_password, user):\n name = security.encrypt(user_password, name)\n title = security.encrypt(user_password, title)\n login = security.encrypt(user_password, login)\n password = security.encrypt(user_password, password)\n site = security.encrypt(user_password, site)\n description = security.encrypt(user_password, description)\n account = Account(name=name, title=title, login=login, password=\n password, site=site, description=description, user=user)\n commit()\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n\n\nclass AccountService:\n <function token>\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n\n def update(self, name, title, login, password, site, description,\n user_password, account):\n if name is not None:\n account.name = security.encrypt(user_password, name)\n if title is not None:\n account.title = security.encrypt(user_password, title)\n if login is not None:\n account.login = security.encrypt(user_password, login)\n if password is not None:\n account.password = security.encrypt(user_password, password)\n if site is not None:\n account.site = security.encrypt(user_password, site)\n if description is not None:\n account.description - security.encrypt(user_password, description)\n commit()\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n\n\nclass AccountService:\n <function token>\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n\n def find_account(self, user_password, default, user):\n accounts = user.accounts\n found_accounts = []\n default = default.lower()\n for account in accounts:\n if security.decrypt(user_password, account.name).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.title).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.login).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.site).lower().find(\n default) != -1:\n found_accounts.append(account)\n continue\n if security.decrypt(user_password, account.description).lower(\n ).find(default) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n <function token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n\n\nclass AccountService:\n <function token>\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n <function token>\n\n def find_account_custom(self, user_password, name, title, login, site,\n description, user):\n accounts = user.accounts\n found_accounts = []\n for account in accounts:\n if name is not None and security.decrypt(user_password, account\n .name).lower().find(name.lower()) != -1:\n found_accounts.append(account)\n continue\n if title is not None and security.decrypt(user_password,\n account.title).lower().find(title.lower()) != -1:\n found_accounts.append(account)\n continue\n if login is not None and security.decrypt(user_password,\n account.login).lower().find(login.lower()) != -1:\n found_accounts.append(account)\n continue\n if site is not None and security.decrypt(user_password, account\n .site).lower().find(site.lower()) != -1:\n found_accounts.append(account)\n continue\n if description is not None and security.decrypt(user_password,\n account.description).lower().find(description.lower()) != -1:\n found_accounts.append(account)\n continue\n return found_accounts\n\n def delete_account(self, account):\n account.delete()\n commit()\n <function token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n\n\nclass AccountService:\n <function token>\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n <function token>\n <function token>\n\n def delete_account(self, account):\n account.delete()\n commit()\n <function token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n\n\nclass AccountService:\n <function token>\n\n def get_account(self, user_password, name, user):\n accounts = user.accounts\n for account in accounts:\n if security.decrypt(user_password, account.name) == name:\n return account\n return None\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n\n\nclass AccountService:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n\n def password_is_right(self, password):\n return security.password_matches(password, self.user.password)\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n <function token>\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n\n def session_is_expired(self):\n if datetime.now() - timedelta(minutes=self.time_session\n ) >= self.time_login:\n return True\n else:\n return False\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n <function token>\n\n def logout(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n <function token>\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n\n def __init__(self):\n self.user = None\n self.message_error = None\n self.time_login = None\n self.logged = False\n self.typed_password = None\n self.time_session = 1\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n <function token>\n <function token>\n <function token>\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n <function token>\n\n def authenticate(self, email, password):\n userService = UserService()\n self.user = userService.get_user(email)\n if self.user is None:\n self.message_error = messages.authentication_email_error\n return False\n if security.password_matches(password, self.user.password):\n self.time_login = datetime.now()\n self.logged = True\n self.typed_password = password\n return True\n else:\n self.message_error = messages.authentication_password_error\n return False\n\n def get_login(self):\n return self.user.login\n <function token>\n <function token>\n <function token>\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n <function token>\n <function token>\n\n def get_login(self):\n return self.user.login\n <function token>\n <function token>\n <function token>\n\n def info_session(self):\n begin = self.time_login\n end = self.time_login + timedelta(minutes=self.time_session)\n return begin, end\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n <function token>\n <function token>\n\n def get_login(self):\n return self.user.login\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass AuthenticationService:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n"
] | false |
99,051 |
ec02813189adac24de0a29f3a31af31e77e89a6c
|
#!/usr/bin/env python
import json
files = [
"authorasproducer.txt",
"barthes.txt",
"construction.txt",
"designerasauthor.txt",
"fitz.txt",
"foucalt.txt",
"laurenrieders.txt",
"martha.txt",
"michaelrock.txt",
"samreith.txt",
"schopenhauer.txt",
"shakespeare.txt"
]
data = {}
for filename in files:
with open(filename) as f:
text = f.read()
lines = []
prev = -1
for (i, c) in enumerate(text):
if c in ['.', '?', '!', ';']:
sentence = text[prev + 1:i + 1]
sentence = sentence.strip()
# print sentence
lines.append(sentence)
prev = i
data[filename] = lines
jsondata = json.dumps(data, indent=2)
with open("authorship.json", 'w') as jf:
jf.write(jsondata)
|
[
"#!/usr/bin/env python\n\nimport json\n\nfiles = [\n \"authorasproducer.txt\",\n \"barthes.txt\",\n \"construction.txt\",\n \"designerasauthor.txt\",\n \"fitz.txt\",\n \"foucalt.txt\",\n \"laurenrieders.txt\",\n \"martha.txt\",\n \"michaelrock.txt\",\n \"samreith.txt\",\n \"schopenhauer.txt\",\n \"shakespeare.txt\"\n]\n\ndata = {}\n\nfor filename in files:\n\n with open(filename) as f:\n text = f.read()\n\n lines = []\n prev = -1\n for (i, c) in enumerate(text):\n if c in ['.', '?', '!', ';']:\n sentence = text[prev + 1:i + 1]\n sentence = sentence.strip()\n # print sentence\n lines.append(sentence)\n prev = i\n data[filename] = lines\n\njsondata = json.dumps(data, indent=2)\nwith open(\"authorship.json\", 'w') as jf:\n jf.write(jsondata)\n",
"import json\nfiles = ['authorasproducer.txt', 'barthes.txt', 'construction.txt',\n 'designerasauthor.txt', 'fitz.txt', 'foucalt.txt', 'laurenrieders.txt',\n 'martha.txt', 'michaelrock.txt', 'samreith.txt', 'schopenhauer.txt',\n 'shakespeare.txt']\ndata = {}\nfor filename in files:\n with open(filename) as f:\n text = f.read()\n lines = []\n prev = -1\n for i, c in enumerate(text):\n if c in ['.', '?', '!', ';']:\n sentence = text[prev + 1:i + 1]\n sentence = sentence.strip()\n lines.append(sentence)\n prev = i\n data[filename] = lines\njsondata = json.dumps(data, indent=2)\nwith open('authorship.json', 'w') as jf:\n jf.write(jsondata)\n",
"<import token>\nfiles = ['authorasproducer.txt', 'barthes.txt', 'construction.txt',\n 'designerasauthor.txt', 'fitz.txt', 'foucalt.txt', 'laurenrieders.txt',\n 'martha.txt', 'michaelrock.txt', 'samreith.txt', 'schopenhauer.txt',\n 'shakespeare.txt']\ndata = {}\nfor filename in files:\n with open(filename) as f:\n text = f.read()\n lines = []\n prev = -1\n for i, c in enumerate(text):\n if c in ['.', '?', '!', ';']:\n sentence = text[prev + 1:i + 1]\n sentence = sentence.strip()\n lines.append(sentence)\n prev = i\n data[filename] = lines\njsondata = json.dumps(data, indent=2)\nwith open('authorship.json', 'w') as jf:\n jf.write(jsondata)\n",
"<import token>\n<assignment token>\nfor filename in files:\n with open(filename) as f:\n text = f.read()\n lines = []\n prev = -1\n for i, c in enumerate(text):\n if c in ['.', '?', '!', ';']:\n sentence = text[prev + 1:i + 1]\n sentence = sentence.strip()\n lines.append(sentence)\n prev = i\n data[filename] = lines\n<assignment token>\nwith open('authorship.json', 'w') as jf:\n jf.write(jsondata)\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,052 |
1de36d84c28d8457c5c5674e267fdaed781939e2
|
# JobHunter
# This script pulls from a job website and stores positions into a database. If there is a new posting it notifies the user.
# CNA 330
# Carlos Del Villar, [email protected]
# collaborated with Eric, Dyllan, youtube.
import urllib.request
import mysql.connector
# Connect to database
# You may need to edit the connect function based on your local settings.
def connect_to_sql():
conn = mysql.connector.connect (user='root', password='',
host='127.0.0.1',
database='jobhunter')
return conn
# Create the table structure
def create_tables(cursor, table):
cursor.execute ('''CREATE TABLE IF NOT EXISTS Jobs_found (id INT PRIMARY KEY auto_increment,
Type varchar(10), Title varchar(100), Description TEXT CHARSET utf8a, Job_id varchar(36),
Created_at DATE, Company varchar(100), location varchar(50),
How_to_apply varchar(1000)); ''')
return
# Query the database.
# You should not need to edit anything in this function
def query_sql(cursor, query):
cursor.execute (query)
return cursor
# Add a new job
def add_new_job(cursor, jobdetails):
Type = jobdetails['Type']
Title = jobdetails['Title']
Description = jobdetails['Description']
Job_ID = jobdetails['Job_ID']
Created_At = jobdetails['Created_At']
Company = jobdetails['Company']
Location = jobdetails['Location']
How_To_Apply = jobdetails['How_To_Apply']
query = cursor.execute (
"INSERT INTO jobs(ID, Type, Title, Description, Job_ID, Created_at, Company, Location, How_to_apply" ")"
"VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
(Type, Title, Description, Job_ID, Created_At, Company, Location, How_To_Apply))
return query_sql (cursor, query)
# Check if new job
def check_if_job_exists(cursor, jobdetails):
## Add your code here
Job_ID = jobdetails['ID']
query = "SELECT * FROM jobs WHERE Job_ID = \"%s\"" % Job_ID
return query_sql (cursor, query)
def delete_job(cursor, jobdetails):
## Add your code here
Job_ID = jobdetails['ID']
query = "DELETE FROM jobs WHERE Job_ID = \"%s\"" % Job_ID
return query_sql (cursor, query)
# Grab new jobs from a website
def fetch_new_jobs(arg_dict):
# Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/Sql.py
query = "https://jobs.github.com/positions.json?" + "location=remote" ## Add arguments here
jsonpage = 0
try:
contents = urllib.request.urlopen (query)
response = contents.read ()
jsonpage = json.loads (response)
except:
pass
return jsonpage
# Load a text-based configuration file
def load_config_file(filename):
argument_dictionary = 0
# Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/FileIO.py
rel_path = os.path.abspath (os.path.dirname (__file__))
file = 0
file_contents = 0
try:
file = open (filename, "r")
file_contents = file.read ()
except FileNotFoundError:
print ("File not found, it will be created.")
file = open (filename, "w")
file.write ("")
file.close ()
## Add in information for argument dictionary
return argument_dictionary
# Main area of the code.
def jobhunt(cursor, arg_dict, add_or_delete_jobs=None):
# Fetch jobs from website
jobpage = fetch_new_jobs (arg_dict)
# print (jobpage)
add_or_delete_jobs (jobpage, cursor)
## Add your code here to parse the job page
## Add in your code here to check if the job already exists in the DB
## Add in your code here to notify the user of a new posting
## EXTRA CREDIT: Add your code to delete old entries
# Setup portion of the program. Take arguments and set up the script
# You should not need to edit anything here.
def add_or_delete_job(jobpage, cursor)
def main():
# Connect to SQL and get cursor
conn = connect_to_sql ()
cursor = conn.cursor ()
create_tables (cursor, "table")
# Load text file and store arguments into dictionary
arg_dict = 0
while (1):
jobhunt (cursor, arg_dict)
conn.commit ()
time.sleep (3600) # Sleep for 1h
if __name__ == '__main__':
main ()
# JobHunter
# This script pulls from a job website and stores positions into a database. If there is a new posting it notifies the user.
# CNA 330
# Carlos Del Villar, [email protected]
# collaborated with Eric, Dyllan, youtube.
import mysql.connector
import json
import urllib.request
import os
import time
# Connect to database
# You may need to edit the connect function based on your local settings.
def connect_to_sql():
conn = mysql.connector.connect (user='root', password='',
host='127.0.0.1',
database='jobhunter')
return conn
# Create the table structure
def create_tables(cursor, table):
cursor.execute ('''CREATE TABLE IF NOT EXISTS Jobs_found (id INT PRIMARY KEY auto_increment,
Type varchar(10), Title varchar(100), Description TEXT CHARSET utf8a, Job_id varchar(36),
Created_at DATE, Company varchar(100), location varchar(50),
How_to_apply varchar(1000)); ''')
return
# Query the database.
# You should not need to edit anything in this function
def query_sql(cursor, query):
cursor.execute (query)
return cursor
# Add a new job
def add_new_job(cursor, jobdetails):
Type = jobdetails['Type']
Title = jobdetails['Title']
Description = jobdetails['Description']
Job_ID = jobdetails['Job_ID']
Created_At = jobdetails['Created_At']
Company = jobdetails['Company']
Location = jobdetails['Location']
How_To_Apply = jobdetails['How_To_Apply']
query = cursor.execute (
"INSERT INTO jobs(ID, Type, Title, Description, Job_ID, Created_at, Company, Location, How_to_apply" ")"
"VALUES(%s,%s,%s,%s,%s,%s,%s,%s)",
(Type, Title, Description, Job_ID, Created_At, Company, Location, How_To_Apply))
return query_sql (cursor, query)
# Check if new job
def check_if_job_exists(cursor, jobdetails):
## Add your code here
Job_ID = jobdetails['ID']
query = "SELECT * FROM jobs WHERE Job_ID = \"%s\"" % Job_ID
return query_sql (cursor, query)
def delete_job(cursor, jobdetails):
## Add your code here
Job_ID = jobdetails['ID']
query = "DELETE FROM jobs WHERE Job_ID = \"%s\"" % Job_ID
return query_sql (cursor, query)
# Grab new jobs from a website
def fetch_new_jobs(arg_dict):
# Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/Sql.py
query = "https://jobs.github.com/positions.json?" + "location=remote" ## Add arguments here
jsonpage = 0
try:
contents = urllib.request.urlopen (query)
response = contents.read ()
jsonpage = json.loads (response)
except:
pass
return jsonpage
# Load a text-based configuration file
def load_config_file(filename):
argument_dictionary = 0
# Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/FileIO.py
file = 0
try:
pass
except FileNotFoundError:
print ("File not found, it will be created.")
file = open (filename, "w")
file.write ("")
file.close ()
## Add in information for argument dictionary
return argument_dictionary
# Main area of the code.
def jobhunt(cursor, arg_dict, add_or_delete_jobs=None):
# Fetch jobs from website
jobpage = fetch_new_jobs (arg_dict)
# print (jobpage)
add_or_delete_jobs (jobpage, cursor)
## Add your code here to parse the job page
## Add in your code here to check if the job already exists in the DB
## Add in your code here to notify the user of a new posting
## EXTRA CREDIT: Add your code to delete old entries
# Setup portion of the program. Take arguments and set up the script
# You should not need to edit anything here.
def add_or_delete_job(cursor):
def main():
# Connect to SQL and get cursor
conn = connect_to_sql ()
cursor = conn.cursor ()
create_tables (cursor, "table")
# Load text file and store arguments into dictionary
arg_dict = 0
while (1):
jobhunt (cursor, arg_dict)
conn.commit ()
time.sleep (3600) # Sleep for 1h
if __name__ == '__main__':
main ()
|
[
"# JobHunter\r\n# This script pulls from a job website and stores positions into a database. If there is a new posting it notifies the user.\r\n# CNA 330\r\n# Carlos Del Villar, [email protected]\r\n# collaborated with Eric, Dyllan, youtube.\r\n\r\nimport urllib.request\r\n\r\nimport mysql.connector\r\n\r\n\r\n# Connect to database\r\n# You may need to edit the connect function based on your local settings.\r\ndef connect_to_sql():\r\n conn = mysql.connector.connect (user='root', password='',\r\n host='127.0.0.1',\r\n database='jobhunter')\r\n return conn\r\n\r\n\r\n# Create the table structure\r\ndef create_tables(cursor, table):\r\n cursor.execute ('''CREATE TABLE IF NOT EXISTS Jobs_found (id INT PRIMARY KEY auto_increment,\r\n Type varchar(10), Title varchar(100), Description TEXT CHARSET utf8a, Job_id varchar(36),\r\n Created_at DATE, Company varchar(100), location varchar(50),\r\n How_to_apply varchar(1000)); ''')\r\n return\r\n\r\n # Query the database.\r\n\r\n\r\n# You should not need to edit anything in this function\r\ndef query_sql(cursor, query):\r\n cursor.execute (query)\r\n return cursor\r\n\r\n\r\n# Add a new job\r\ndef add_new_job(cursor, jobdetails):\r\n Type = jobdetails['Type']\r\n Title = jobdetails['Title']\r\n Description = jobdetails['Description']\r\n Job_ID = jobdetails['Job_ID']\r\n Created_At = jobdetails['Created_At']\r\n Company = jobdetails['Company']\r\n Location = jobdetails['Location']\r\n How_To_Apply = jobdetails['How_To_Apply']\r\n query = cursor.execute (\r\n \"INSERT INTO jobs(ID, Type, Title, Description, Job_ID, Created_at, Company, Location, How_to_apply\" \")\"\r\n \"VALUES(%s,%s,%s,%s,%s,%s,%s,%s)\",\r\n (Type, Title, Description, Job_ID, Created_At, Company, Location, How_To_Apply))\r\n\r\n return query_sql (cursor, query)\r\n\r\n\r\n# Check if new job\r\ndef check_if_job_exists(cursor, jobdetails):\r\n ## Add your code here\r\n Job_ID = jobdetails['ID']\r\n query = \"SELECT * FROM jobs WHERE Job_ID = \\\"%s\\\"\" % Job_ID\r\n return query_sql (cursor, query)\r\n\r\n\r\ndef delete_job(cursor, jobdetails):\r\n ## Add your code here\r\n Job_ID = jobdetails['ID']\r\n query = \"DELETE FROM jobs WHERE Job_ID = \\\"%s\\\"\" % Job_ID\r\n return query_sql (cursor, query)\r\n\r\n\r\n# Grab new jobs from a website\r\ndef fetch_new_jobs(arg_dict):\r\n # Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/Sql.py\r\n query = \"https://jobs.github.com/positions.json?\" + \"location=remote\" ## Add arguments here\r\n jsonpage = 0\r\n try:\r\n contents = urllib.request.urlopen (query)\r\n response = contents.read ()\r\n jsonpage = json.loads (response)\r\n except:\r\n pass\r\n return jsonpage\r\n\r\n\r\n# Load a text-based configuration file\r\ndef load_config_file(filename):\r\n argument_dictionary = 0\r\n # Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/FileIO.py\r\n rel_path = os.path.abspath (os.path.dirname (__file__))\r\n file = 0\r\n file_contents = 0\r\n try:\r\n file = open (filename, \"r\")\r\n file_contents = file.read ()\r\n except FileNotFoundError:\r\n print (\"File not found, it will be created.\")\r\n file = open (filename, \"w\")\r\n file.write (\"\")\r\n file.close ()\r\n\r\n ## Add in information for argument dictionary\r\n return argument_dictionary\r\n\r\n\r\n# Main area of the code.\r\ndef jobhunt(cursor, arg_dict, add_or_delete_jobs=None):\r\n # Fetch jobs from website\r\n jobpage = fetch_new_jobs (arg_dict)\r\n # print (jobpage)\r\n add_or_delete_jobs (jobpage, cursor)\r\n ## Add your code here to parse the job page\r\n\r\n ## Add in your code here to check if the job already exists in the DB\r\n\r\n ## Add in your code here to notify the user of a new posting\r\n\r\n ## EXTRA CREDIT: Add your code to delete old entries\r\n\r\n\r\n# Setup portion of the program. Take arguments and set up the script\r\n# You should not need to edit anything here.\r\ndef add_or_delete_job(jobpage, cursor)\r\n\r\n\r\ndef main():\r\n # Connect to SQL and get cursor\r\n conn = connect_to_sql ()\r\n cursor = conn.cursor ()\r\n create_tables (cursor, \"table\")\r\n # Load text file and store arguments into dictionary\r\n arg_dict = 0\r\n while (1):\r\n jobhunt (cursor, arg_dict)\r\n conn.commit ()\r\n time.sleep (3600) # Sleep for 1h\r\n\r\n\r\nif __name__ == '__main__':\r\n main ()\r\n# JobHunter\r\n# This script pulls from a job website and stores positions into a database. If there is a new posting it notifies the user.\r\n# CNA 330\r\n# Carlos Del Villar, [email protected]\r\n# collaborated with Eric, Dyllan, youtube.\r\n\r\nimport mysql.connector\r\nimport json\r\nimport urllib.request\r\nimport os\r\nimport time\r\n\r\n\r\n# Connect to database\r\n# You may need to edit the connect function based on your local settings.\r\ndef connect_to_sql():\r\n conn = mysql.connector.connect (user='root', password='',\r\n host='127.0.0.1',\r\n database='jobhunter')\r\n return conn\r\n\r\n\r\n# Create the table structure\r\ndef create_tables(cursor, table):\r\n cursor.execute ('''CREATE TABLE IF NOT EXISTS Jobs_found (id INT PRIMARY KEY auto_increment,\r\n Type varchar(10), Title varchar(100), Description TEXT CHARSET utf8a, Job_id varchar(36),\r\n Created_at DATE, Company varchar(100), location varchar(50),\r\n How_to_apply varchar(1000)); ''')\r\n return\r\n\r\n # Query the database.\r\n\r\n\r\n# You should not need to edit anything in this function\r\ndef query_sql(cursor, query):\r\n cursor.execute (query)\r\n return cursor\r\n\r\n\r\n# Add a new job\r\ndef add_new_job(cursor, jobdetails):\r\n Type = jobdetails['Type']\r\n Title = jobdetails['Title']\r\n Description = jobdetails['Description']\r\n Job_ID = jobdetails['Job_ID']\r\n Created_At = jobdetails['Created_At']\r\n Company = jobdetails['Company']\r\n Location = jobdetails['Location']\r\n How_To_Apply = jobdetails['How_To_Apply']\r\n query = cursor.execute (\r\n \"INSERT INTO jobs(ID, Type, Title, Description, Job_ID, Created_at, Company, Location, How_to_apply\" \")\"\r\n \"VALUES(%s,%s,%s,%s,%s,%s,%s,%s)\",\r\n (Type, Title, Description, Job_ID, Created_At, Company, Location, How_To_Apply))\r\n\r\n return query_sql (cursor, query)\r\n\r\n\r\n# Check if new job\r\ndef check_if_job_exists(cursor, jobdetails):\r\n ## Add your code here\r\n Job_ID = jobdetails['ID']\r\n query = \"SELECT * FROM jobs WHERE Job_ID = \\\"%s\\\"\" % Job_ID\r\n return query_sql (cursor, query)\r\n\r\n\r\ndef delete_job(cursor, jobdetails):\r\n ## Add your code here\r\n Job_ID = jobdetails['ID']\r\n query = \"DELETE FROM jobs WHERE Job_ID = \\\"%s\\\"\" % Job_ID\r\n return query_sql (cursor, query)\r\n\r\n\r\n# Grab new jobs from a website\r\ndef fetch_new_jobs(arg_dict):\r\n # Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/Sql.py\r\n query = \"https://jobs.github.com/positions.json?\" + \"location=remote\" ## Add arguments here\r\n jsonpage = 0\r\n try:\r\n contents = urllib.request.urlopen (query)\r\n response = contents.read ()\r\n jsonpage = json.loads (response)\r\n except:\r\n pass\r\n return jsonpage\r\n\r\n\r\n# Load a text-based configuration file\r\ndef load_config_file(filename):\r\n argument_dictionary = 0\r\n # Code from https://github.com/RTCedu/CNA336/blob/master/Spring2018/FileIO.py\r\n file = 0\r\n try:\r\n pass\r\n except FileNotFoundError:\r\n print (\"File not found, it will be created.\")\r\n file = open (filename, \"w\")\r\n file.write (\"\")\r\n file.close ()\r\n\r\n ## Add in information for argument dictionary\r\n return argument_dictionary\r\n\r\n\r\n# Main area of the code.\r\ndef jobhunt(cursor, arg_dict, add_or_delete_jobs=None):\r\n # Fetch jobs from website\r\n jobpage = fetch_new_jobs (arg_dict)\r\n # print (jobpage)\r\n add_or_delete_jobs (jobpage, cursor)\r\n ## Add your code here to parse the job page\r\n\r\n ## Add in your code here to check if the job already exists in the DB\r\n\r\n ## Add in your code here to notify the user of a new posting\r\n\r\n ## EXTRA CREDIT: Add your code to delete old entries\r\n\r\n\r\n# Setup portion of the program. Take arguments and set up the script\r\n# You should not need to edit anything here.\r\ndef add_or_delete_job(cursor):\r\n\r\n\r\ndef main():\r\n # Connect to SQL and get cursor\r\n conn = connect_to_sql ()\r\n cursor = conn.cursor ()\r\n create_tables (cursor, \"table\")\r\n # Load text file and store arguments into dictionary\r\n arg_dict = 0\r\n while (1):\r\n jobhunt (cursor, arg_dict)\r\n conn.commit ()\r\n time.sleep (3600) # Sleep for 1h\r\n\r\n\r\nif __name__ == '__main__':\r\n main ()\r\n"
] | true |
99,053 |
00343a17990e6ef07f18c37079691c6d0a82dcfd
|
"""
Calculate distance and azimuth from to locator positions
Author: 9V1KG Klaus D Goepel
https://klsin.bpmsg.com
https://github.com/9V1KG/maidenhead
Created: 2020-05-02
License: http://www.fsf.org/copyleft/gpl.html
"""
import sys
import re
from openlocationcode import openlocationcode as olc
import maidenhead.maiden
from maidenhead.maiden import Maiden, Geodg2dms
MY_LOC = "PK04lc68dj"
COL = maidenhead.maiden.COL
MAIDEN = Maiden() # Initialize class
print("""
Maidenhead locator program by 9V1KG
https://github.com/9V1KG/maidenhead
""")
print(f"{COL.green}Calculates distance and azimuth from your locator (\"MY_LOC\"){COL.end}")
print(f"My locator: {MY_LOC}")
POS_A = MAIDEN.maiden2latlon(MY_LOC)
print(f"My pos: {POS_A} Lat/Lon")
PDMS_A = Geodg2dms(POS_A)
print(f"My pos: "
f"{PDMS_A.lat_deg} {PDMS_A.lat_min}'{PDMS_A.lat_sec}\"{PDMS_A.lat_dir}, "
f"{PDMS_A.lon_deg} {PDMS_A.lon_min}'{PDMS_A.lon_sec}\"{PDMS_A.lon_dir}"
)
opl = olc.encode(POS_A[0], POS_A[1])
print(f"Google map: {opl}\r\n")
line = input("Input Maidenhead Locator (4 to 10 char): ")
if not re.match(r"([A-Ra-r]{2}\d\d)(([A-Za-z]{2})(\d\d)?){0,2}", line):
print("Locator has 2 to 5 character/number pairs, like PK04lc")
sys.exit(1)
pos_b = MAIDEN.maiden2latlon(line)
print(f"Result: {COL.yellow}{pos_b}{COL.end} Lat/Lon")
pdms_b = Geodg2dms(pos_b)
print(f"Result: "
f"{pdms_b.lat_deg} {pdms_b.lat_min}'{pdms_b.lat_sec}\"{pdms_b.lat_dir}, "
f"{pdms_b.lon_deg} {pdms_b.lon_min}'{pdms_b.lon_sec}\"{pdms_b.lon_dir}"
)
opl = olc.encode(pos_b[0], pos_b[1])
print(f"Google map: {opl}")
betw = MAIDEN.dist_az(POS_A, pos_b)
print(f"Distance: {COL.yellow}{betw[0]} km{COL.end} "
f"Azimuth: {COL.yellow}{betw[1]} deg{COL.end}")
|
[
"\"\"\"\n Calculate distance and azimuth from to locator positions\n Author: 9V1KG Klaus D Goepel\n https://klsin.bpmsg.com\n https://github.com/9V1KG/maidenhead\n Created: 2020-05-02\n License: http://www.fsf.org/copyleft/gpl.html\n\"\"\"\nimport sys\nimport re\nfrom openlocationcode import openlocationcode as olc\nimport maidenhead.maiden\nfrom maidenhead.maiden import Maiden, Geodg2dms\n\nMY_LOC = \"PK04lc68dj\"\nCOL = maidenhead.maiden.COL\n\nMAIDEN = Maiden() # Initialize class\nprint(\"\"\"\nMaidenhead locator program by 9V1KG\nhttps://github.com/9V1KG/maidenhead\n \"\"\")\nprint(f\"{COL.green}Calculates distance and azimuth from your locator (\\\"MY_LOC\\\"){COL.end}\")\nprint(f\"My locator: {MY_LOC}\")\nPOS_A = MAIDEN.maiden2latlon(MY_LOC)\nprint(f\"My pos: {POS_A} Lat/Lon\")\nPDMS_A = Geodg2dms(POS_A)\nprint(f\"My pos: \"\n f\"{PDMS_A.lat_deg} {PDMS_A.lat_min}'{PDMS_A.lat_sec}\\\"{PDMS_A.lat_dir}, \"\n f\"{PDMS_A.lon_deg} {PDMS_A.lon_min}'{PDMS_A.lon_sec}\\\"{PDMS_A.lon_dir}\"\n )\nopl = olc.encode(POS_A[0], POS_A[1])\nprint(f\"Google map: {opl}\\r\\n\")\n\nline = input(\"Input Maidenhead Locator (4 to 10 char): \")\nif not re.match(r\"([A-Ra-r]{2}\\d\\d)(([A-Za-z]{2})(\\d\\d)?){0,2}\", line):\n print(\"Locator has 2 to 5 character/number pairs, like PK04lc\")\n sys.exit(1)\npos_b = MAIDEN.maiden2latlon(line)\nprint(f\"Result: {COL.yellow}{pos_b}{COL.end} Lat/Lon\")\npdms_b = Geodg2dms(pos_b)\nprint(f\"Result: \"\n f\"{pdms_b.lat_deg} {pdms_b.lat_min}'{pdms_b.lat_sec}\\\"{pdms_b.lat_dir}, \"\n f\"{pdms_b.lon_deg} {pdms_b.lon_min}'{pdms_b.lon_sec}\\\"{pdms_b.lon_dir}\"\n )\nopl = olc.encode(pos_b[0], pos_b[1])\nprint(f\"Google map: {opl}\")\nbetw = MAIDEN.dist_az(POS_A, pos_b)\nprint(f\"Distance: {COL.yellow}{betw[0]} km{COL.end} \"\n f\"Azimuth: {COL.yellow}{betw[1]} deg{COL.end}\")\n",
"<docstring token>\nimport sys\nimport re\nfrom openlocationcode import openlocationcode as olc\nimport maidenhead.maiden\nfrom maidenhead.maiden import Maiden, Geodg2dms\nMY_LOC = 'PK04lc68dj'\nCOL = maidenhead.maiden.COL\nMAIDEN = Maiden()\nprint(\n \"\"\"\nMaidenhead locator program by 9V1KG\nhttps://github.com/9V1KG/maidenhead\n \"\"\"\n )\nprint(\n f'{COL.green}Calculates distance and azimuth from your locator (\"MY_LOC\"){COL.end}'\n )\nprint(f'My locator: {MY_LOC}')\nPOS_A = MAIDEN.maiden2latlon(MY_LOC)\nprint(f'My pos: {POS_A} Lat/Lon')\nPDMS_A = Geodg2dms(POS_A)\nprint(\n f'My pos: {PDMS_A.lat_deg} {PDMS_A.lat_min}\\'{PDMS_A.lat_sec}\"{PDMS_A.lat_dir}, {PDMS_A.lon_deg} {PDMS_A.lon_min}\\'{PDMS_A.lon_sec}\"{PDMS_A.lon_dir}'\n )\nopl = olc.encode(POS_A[0], POS_A[1])\nprint(f'Google map: {opl}\\r\\n')\nline = input('Input Maidenhead Locator (4 to 10 char): ')\nif not re.match('([A-Ra-r]{2}\\\\d\\\\d)(([A-Za-z]{2})(\\\\d\\\\d)?){0,2}', line):\n print('Locator has 2 to 5 character/number pairs, like PK04lc')\n sys.exit(1)\npos_b = MAIDEN.maiden2latlon(line)\nprint(f'Result: {COL.yellow}{pos_b}{COL.end} Lat/Lon')\npdms_b = Geodg2dms(pos_b)\nprint(\n f'Result: {pdms_b.lat_deg} {pdms_b.lat_min}\\'{pdms_b.lat_sec}\"{pdms_b.lat_dir}, {pdms_b.lon_deg} {pdms_b.lon_min}\\'{pdms_b.lon_sec}\"{pdms_b.lon_dir}'\n )\nopl = olc.encode(pos_b[0], pos_b[1])\nprint(f'Google map: {opl}')\nbetw = MAIDEN.dist_az(POS_A, pos_b)\nprint(\n f'Distance: {COL.yellow}{betw[0]} km{COL.end} Azimuth: {COL.yellow}{betw[1]} deg{COL.end}'\n )\n",
"<docstring token>\n<import token>\nMY_LOC = 'PK04lc68dj'\nCOL = maidenhead.maiden.COL\nMAIDEN = Maiden()\nprint(\n \"\"\"\nMaidenhead locator program by 9V1KG\nhttps://github.com/9V1KG/maidenhead\n \"\"\"\n )\nprint(\n f'{COL.green}Calculates distance and azimuth from your locator (\"MY_LOC\"){COL.end}'\n )\nprint(f'My locator: {MY_LOC}')\nPOS_A = MAIDEN.maiden2latlon(MY_LOC)\nprint(f'My pos: {POS_A} Lat/Lon')\nPDMS_A = Geodg2dms(POS_A)\nprint(\n f'My pos: {PDMS_A.lat_deg} {PDMS_A.lat_min}\\'{PDMS_A.lat_sec}\"{PDMS_A.lat_dir}, {PDMS_A.lon_deg} {PDMS_A.lon_min}\\'{PDMS_A.lon_sec}\"{PDMS_A.lon_dir}'\n )\nopl = olc.encode(POS_A[0], POS_A[1])\nprint(f'Google map: {opl}\\r\\n')\nline = input('Input Maidenhead Locator (4 to 10 char): ')\nif not re.match('([A-Ra-r]{2}\\\\d\\\\d)(([A-Za-z]{2})(\\\\d\\\\d)?){0,2}', line):\n print('Locator has 2 to 5 character/number pairs, like PK04lc')\n sys.exit(1)\npos_b = MAIDEN.maiden2latlon(line)\nprint(f'Result: {COL.yellow}{pos_b}{COL.end} Lat/Lon')\npdms_b = Geodg2dms(pos_b)\nprint(\n f'Result: {pdms_b.lat_deg} {pdms_b.lat_min}\\'{pdms_b.lat_sec}\"{pdms_b.lat_dir}, {pdms_b.lon_deg} {pdms_b.lon_min}\\'{pdms_b.lon_sec}\"{pdms_b.lon_dir}'\n )\nopl = olc.encode(pos_b[0], pos_b[1])\nprint(f'Google map: {opl}')\nbetw = MAIDEN.dist_az(POS_A, pos_b)\nprint(\n f'Distance: {COL.yellow}{betw[0]} km{COL.end} Azimuth: {COL.yellow}{betw[1]} deg{COL.end}'\n )\n",
"<docstring token>\n<import token>\n<assignment token>\nprint(\n \"\"\"\nMaidenhead locator program by 9V1KG\nhttps://github.com/9V1KG/maidenhead\n \"\"\"\n )\nprint(\n f'{COL.green}Calculates distance and azimuth from your locator (\"MY_LOC\"){COL.end}'\n )\nprint(f'My locator: {MY_LOC}')\n<assignment token>\nprint(f'My pos: {POS_A} Lat/Lon')\n<assignment token>\nprint(\n f'My pos: {PDMS_A.lat_deg} {PDMS_A.lat_min}\\'{PDMS_A.lat_sec}\"{PDMS_A.lat_dir}, {PDMS_A.lon_deg} {PDMS_A.lon_min}\\'{PDMS_A.lon_sec}\"{PDMS_A.lon_dir}'\n )\n<assignment token>\nprint(f'Google map: {opl}\\r\\n')\n<assignment token>\nif not re.match('([A-Ra-r]{2}\\\\d\\\\d)(([A-Za-z]{2})(\\\\d\\\\d)?){0,2}', line):\n print('Locator has 2 to 5 character/number pairs, like PK04lc')\n sys.exit(1)\n<assignment token>\nprint(f'Result: {COL.yellow}{pos_b}{COL.end} Lat/Lon')\n<assignment token>\nprint(\n f'Result: {pdms_b.lat_deg} {pdms_b.lat_min}\\'{pdms_b.lat_sec}\"{pdms_b.lat_dir}, {pdms_b.lon_deg} {pdms_b.lon_min}\\'{pdms_b.lon_sec}\"{pdms_b.lon_dir}'\n )\n<assignment token>\nprint(f'Google map: {opl}')\n<assignment token>\nprint(\n f'Distance: {COL.yellow}{betw[0]} km{COL.end} Azimuth: {COL.yellow}{betw[1]} deg{COL.end}'\n )\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,054 |
0aac6ab4aabaed23ff811731830e192890897eb2
|
'''
1. create
1. add new single dataset (via console)
2. read
1. show all datasets
2. show single dataset
3. update
1. update single dataset
4. delete
1. delete all
2. delete single row
5. save/export
6. end program
'''
# This functions prints a main menu structure in the console.
def show_main_menu():
print("What would you like to do: ")
print("1. Create")
print("2. Read")
print("3. Update")
print("4. Delete")
print("5. End")
# This between min and max valuefunction checks if an integer is input
def ask_for_integer_input(min, max):
while True:
print("Please enter a number between",min, "and",max,"!")
answer = input("Input: ")
if answer.isdecimal():
if min > int(answer):
print("The entered value is too low.")
elif max < int(answer):
print("The entered value is too high.")
else:
return int(answer)
else:
print("The entered value is no integer.")
def peek_single_dataset(lines):
option3 = ask_for_integer_input(1, len(lines))
print(lines[int(option3)-1])
def show_all_datasets(lines):
max_sizes = []
for line in lines:
for i, token in enumerate(line.split(';')):
if len(max_sizes) <= i:
max_sizes.append(len(token.strip()))
elif len(token.strip()) > max_sizes[i]:
max_sizes[i] = len(token.strip())
for linenumber, line in enumerate(lines):
first = str(linenumber) if linenumber else " "
print(first.ljust(3),end=' ')
for i, token in enumerate(line.split(';')):
print(fr"{token.strip()}".ljust(max_sizes[i]+1,'.'), end='')
print()
def delete_single_dateset(lines):
pass
def delete_all_datasets(lines):
pass
def update_single_dataset(lines):
pass
def add_via_console(lines):
pass
def create_menu(create_lines):
pass
def read_menu(read_lines):
print("1. Show all")
print("2. Show single line")
option2 = ask_for_integer_input(1, 2)
if option2 == 1:
show_all_datasets(read_lines)
else:
peek_single_dataset(read_lines)
def update_menu(update_lines):
pass
def delete_menu(delete_lines):
pass
import pathlib # loads the library for object-oriented filesystem paths
current_folder = pathlib.Path(__file__).parent.absolute().__str__()
import_file_name = "export.csv"
path_source = current_folder + "/" + import_file_name
print("Importing: " + path_source)
with open(path_source) as file:
cur_lines = file.readlines()
while True:
show_main_menu()
option = ask_for_integer_input(1, 6)
if option == 1: # hier wird importiert oder hinzugefügt
create_menu(cur_lines)
elif option == 2: # hier wird gefiltert und angezeigt
read_menu(cur_lines)
elif option == 3: # hier werden vorhandene werte geändert
update_menu(cur_lines)
elif option == 4: # hier werden vorhanden werte gelöscht
delete_menu(cur_lines)
elif option == 5: # hier wird das programm beendet
#cleanUnicode(path_source)
print("good bye")
break
|
[
"'''\n1. create\n 1. add new single dataset (via console)\n2. read\n 1. show all datasets\n 2. show single dataset\n3. update\n 1. update single dataset\n4. delete\n 1. delete all\n 2. delete single row\n5. save/export\n6. end program\n'''\n\n# This functions prints a main menu structure in the console.\ndef show_main_menu():\n print(\"What would you like to do: \")\n print(\"1. Create\")\n print(\"2. Read\")\n print(\"3. Update\")\n print(\"4. Delete\")\n print(\"5. End\")\n \n # This between min and max valuefunction checks if an integer is input\ndef ask_for_integer_input(min, max):\n while True:\n print(\"Please enter a number between\",min, \"and\",max,\"!\")\n answer = input(\"Input: \")\n if answer.isdecimal():\n if min > int(answer):\n print(\"The entered value is too low.\")\n elif max < int(answer):\n print(\"The entered value is too high.\")\n else:\n return int(answer)\n else:\n print(\"The entered value is no integer.\")\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3)-1])\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else \" \"\n print(first.ljust(3),end=' ')\n for i, token in enumerate(line.split(';')):\n print(fr\"{token.strip()}\".ljust(max_sizes[i]+1,'.'), end='')\n print()\n\ndef delete_single_dateset(lines):\n pass\n\ndef delete_all_datasets(lines):\n pass\n\ndef update_single_dataset(lines):\n pass\n\ndef add_via_console(lines):\n pass\n\ndef create_menu(create_lines):\n pass\n\ndef read_menu(read_lines):\n print(\"1. Show all\")\n print(\"2. Show single line\")\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\ndef update_menu(update_lines):\n pass\n\ndef delete_menu(delete_lines):\n pass\n\n\nimport pathlib # loads the library for object-oriented filesystem paths\ncurrent_folder = pathlib.Path(__file__).parent.absolute().__str__()\nimport_file_name = \"export.csv\"\npath_source = current_folder + \"/\" + import_file_name\nprint(\"Importing: \" + path_source)\n\nwith open(path_source) as file:\n cur_lines = file.readlines()\n while True:\n show_main_menu()\n option = ask_for_integer_input(1, 6)\n if option == 1: # hier wird importiert oder hinzugefügt\n create_menu(cur_lines)\n elif option == 2: # hier wird gefiltert und angezeigt\n read_menu(cur_lines)\n elif option == 3: # hier werden vorhandene werte geändert\n update_menu(cur_lines)\n elif option == 4: # hier werden vorhanden werte gelöscht\n delete_menu(cur_lines)\n elif option == 5: # hier wird das programm beendet\n #cleanUnicode(path_source)\n print(\"good bye\")\n break\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\ndef ask_for_integer_input(min, max):\n while True:\n print('Please enter a number between', min, 'and', max, '!')\n answer = input('Input: ')\n if answer.isdecimal():\n if min > int(answer):\n print('The entered value is too low.')\n elif max < int(answer):\n print('The entered value is too high.')\n else:\n return int(answer)\n else:\n print('The entered value is no integer.')\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\ndef update_single_dataset(lines):\n pass\n\n\ndef add_via_console(lines):\n pass\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\ndef delete_menu(delete_lines):\n pass\n\n\nimport pathlib\ncurrent_folder = pathlib.Path(__file__).parent.absolute().__str__()\nimport_file_name = 'export.csv'\npath_source = current_folder + '/' + import_file_name\nprint('Importing: ' + path_source)\nwith open(path_source) as file:\n cur_lines = file.readlines()\n while True:\n show_main_menu()\n option = ask_for_integer_input(1, 6)\n if option == 1:\n create_menu(cur_lines)\n elif option == 2:\n read_menu(cur_lines)\n elif option == 3:\n update_menu(cur_lines)\n elif option == 4:\n delete_menu(cur_lines)\n elif option == 5:\n print('good bye')\n break\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\ndef ask_for_integer_input(min, max):\n while True:\n print('Please enter a number between', min, 'and', max, '!')\n answer = input('Input: ')\n if answer.isdecimal():\n if min > int(answer):\n print('The entered value is too low.')\n elif max < int(answer):\n print('The entered value is too high.')\n else:\n return int(answer)\n else:\n print('The entered value is no integer.')\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\ndef update_single_dataset(lines):\n pass\n\n\ndef add_via_console(lines):\n pass\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\ndef delete_menu(delete_lines):\n pass\n\n\n<import token>\ncurrent_folder = pathlib.Path(__file__).parent.absolute().__str__()\nimport_file_name = 'export.csv'\npath_source = current_folder + '/' + import_file_name\nprint('Importing: ' + path_source)\nwith open(path_source) as file:\n cur_lines = file.readlines()\n while True:\n show_main_menu()\n option = ask_for_integer_input(1, 6)\n if option == 1:\n create_menu(cur_lines)\n elif option == 2:\n read_menu(cur_lines)\n elif option == 3:\n update_menu(cur_lines)\n elif option == 4:\n delete_menu(cur_lines)\n elif option == 5:\n print('good bye')\n break\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\ndef ask_for_integer_input(min, max):\n while True:\n print('Please enter a number between', min, 'and', max, '!')\n answer = input('Input: ')\n if answer.isdecimal():\n if min > int(answer):\n print('The entered value is too low.')\n elif max < int(answer):\n print('The entered value is too high.')\n else:\n return int(answer)\n else:\n print('The entered value is no integer.')\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\ndef update_single_dataset(lines):\n pass\n\n\ndef add_via_console(lines):\n pass\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\ndef delete_menu(delete_lines):\n pass\n\n\n<import token>\n<assignment token>\nprint('Importing: ' + path_source)\nwith open(path_source) as file:\n cur_lines = file.readlines()\n while True:\n show_main_menu()\n option = ask_for_integer_input(1, 6)\n if option == 1:\n create_menu(cur_lines)\n elif option == 2:\n read_menu(cur_lines)\n elif option == 3:\n update_menu(cur_lines)\n elif option == 4:\n delete_menu(cur_lines)\n elif option == 5:\n print('good bye')\n break\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\ndef ask_for_integer_input(min, max):\n while True:\n print('Please enter a number between', min, 'and', max, '!')\n answer = input('Input: ')\n if answer.isdecimal():\n if min > int(answer):\n print('The entered value is too low.')\n elif max < int(answer):\n print('The entered value is too high.')\n else:\n return int(answer)\n else:\n print('The entered value is no integer.')\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\ndef update_single_dataset(lines):\n pass\n\n\ndef add_via_console(lines):\n pass\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\ndef delete_menu(delete_lines):\n pass\n\n\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\ndef ask_for_integer_input(min, max):\n while True:\n print('Please enter a number between', min, 'and', max, '!')\n answer = input('Input: ')\n if answer.isdecimal():\n if min > int(answer):\n print('The entered value is too low.')\n elif max < int(answer):\n print('The entered value is too high.')\n else:\n return int(answer)\n else:\n print('The entered value is no integer.')\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n\n\ndef add_via_console(lines):\n pass\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\ndef delete_menu(delete_lines):\n pass\n\n\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\ndef ask_for_integer_input(min, max):\n while True:\n print('Please enter a number between', min, 'and', max, '!')\n answer = input('Input: ')\n if answer.isdecimal():\n if min > int(answer):\n print('The entered value is too low.')\n elif max < int(answer):\n print('The entered value is too high.')\n else:\n return int(answer)\n else:\n print('The entered value is no integer.')\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n\n\ndef add_via_console(lines):\n pass\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\ndef ask_for_integer_input(min, max):\n while True:\n print('Please enter a number between', min, 'and', max, '!')\n answer = input('Input: ')\n if answer.isdecimal():\n if min > int(answer):\n print('The entered value is too low.')\n elif max < int(answer):\n print('The entered value is too high.')\n else:\n return int(answer)\n else:\n print('The entered value is no integer.')\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n<function token>\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n\n\ndef show_main_menu():\n print('What would you like to do: ')\n print('1. Create')\n print('2. Read')\n print('3. Update')\n print('4. Delete')\n print('5. End')\n\n\n<function token>\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n<function token>\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n<function token>\n\n\ndef create_menu(create_lines):\n pass\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n\n\ndef peek_single_dataset(lines):\n option3 = ask_for_integer_input(1, len(lines))\n print(lines[int(option3) - 1])\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n<function token>\n\n\ndef show_all_datasets(lines):\n max_sizes = []\n for line in lines:\n for i, token in enumerate(line.split(';')):\n if len(max_sizes) <= i:\n max_sizes.append(len(token.strip()))\n elif len(token.strip()) > max_sizes[i]:\n max_sizes[i] = len(token.strip())\n for linenumber, line in enumerate(lines):\n first = str(linenumber) if linenumber else ' '\n print(first.ljust(3), end=' ')\n for i, token in enumerate(line.split(';')):\n print(f'{token.strip()}'.ljust(max_sizes[i] + 1, '.'), end='')\n print()\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef delete_single_dateset(lines):\n pass\n\n\ndef delete_all_datasets(lines):\n pass\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef delete_single_dateset(lines):\n pass\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef read_menu(read_lines):\n print('1. Show all')\n print('2. Show single line')\n option2 = ask_for_integer_input(1, 2)\n if option2 == 1:\n show_all_datasets(read_lines)\n else:\n peek_single_dataset(read_lines)\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef update_menu(update_lines):\n pass\n\n\n<function token>\n<import token>\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<import token>\n<assignment token>\n<code token>\n"
] | false |
99,055 |
f5ddfb4baa24e81e2439db8a9f5234e259536c7e
|
#nst9fk
import random
import math
import Card
import sys
def play_a_game(data):
hand_number = data[2] + data[3] + data[4] + 1
print("\n\nHand %d:" % hand_number)
# Take bet
bet = -1
while bet > data[1] or bet <= 0:
try:
print("You have %d chips." % data[1])
bet = int(input("How many chips to bet for this hand? "))
except ValueError:
print("Please enter a valid number of chips to bet!")
bet = -1
else:
if bet>data[1]:
print("You cannot bet more chips than you have!")
elif bet==0:
print("You cannot bet zero chips!")
elif bet<0:
print("You cannot bet negative chips!")
data[5] = bet
# Deal Player Cards
print("Dealing Cards...\n")
number_aces=0
total=0
# check for aces
new_card1 = Card.Card()
if new_card1.get_value() == 11:
number_aces = 1
new_card2 = Card.Card()
if new_card2.get_value() == 11:
if number_aces == 1:
number_aces = 1
total = new_card1.get_value() + new_card2.get_value()
# if you draw two aces, total goes to 12
if total == 22:
total = 12
hand = [new_card1,new_card2]
# Deal Dealer's Cards
dealer_aces=0
dealer_total = 0
dealer_card1 = Card.Card()
if dealer_card1.get_value() == 11:
dealer_aces = 1
dealer_card2 = Card.Card()
if dealer_card2.get_value() == 11:
dealer_aces = 1
dealer_total = dealer_card1.get_value() + dealer_card2.get_value()
# if you draw two aces, total goes to 12
if dealer_total == 22:
dealer_total = 12
dealer_hand = [dealer_card1,dealer_card2]
# print hands
print("Your hand:")
for x in hand:
print(x)
print("\nDealer's shown card:")
print(dealer_hand[0])
# check for blackjacks
if total == 21:
print("\nBLACKJACK!")
data[5] = math.floor(data[5]*1.5)
win(data)
elif dealer_total == 21:
print(dealer_hand[1])
print("\nDealer has Blackjack!")
loss(data)
# player hit or stand
while True:
choice = input("\nEnter '1' to hit, anything else to stand: ")
if choice == "1":
# deal card
new_card = Card.Card()
print("Card Drawn: " + str(new_card))
if new_card.get_value() == 11:
number_aces +=1
total += new_card.get_value()
# player busts, check for aces
if total > 21:
if number_aces > 0:
total -= 10
number_aces -= 1
else:
print("BUST")
print("Your total: %d" % total)
loss(data)
# player did not bust, add card to hand
hand.append(new_card)
print("\nYour hand:")
for x in hand:
print(x)
print("\nDealer's shown card:")
print(dealer_hand[0])
# repeat until user does not hit
else:
break
# User has stood
# Show dealer hand
print("\nDealer's hand:")
for x in dealer_hand:
print(x)
# if the dealer is at or above 17, do not print totals
# print is covered after the while loop
if dealer_total >= 17:
print("\nDealer total: %d" % dealer_total)
print("Your total: %d" % total)
# dealer already has a better hand
if dealer_total>total:
loss(data)
# dealers hand is not better
while dealer_total<17 and dealer_total<total:
# deal card
new_card = Card.Card()
print("\nCard Drawn: " + str(new_card))
if new_card.get_value() == 11:
dealer_aces +=1
dealer_total += new_card.get_value()
# dealer busts, check for aces
if dealer_total > 21:
if dealer_aces > 0:
dealer_total -= 10
dealer_aces -= 1
else:
print("New Dealer Total: %d" % dealer_total)
print("DEALER BUST\n")
win(data)
print("New Dealer Total: %d\n" % dealer_total)
# dealer did not bust, add card to hand
dealer_hand.append(new_card)
# repeat until dealer hits or passes 17 or wins
# if no-one busted, determine outcome
if(dealer_total<22 and total<22):
print("Dealer total: %d" % dealer_total)
print("Your total: %d" % total)
if dealer_total > total:
loss(data)
elif dealer_total < total:
win(data)
else:
tie(data)
# add winnings to user, increase win stat
def win(data):
print("You Win!")
data[2] += 1
data[1] += data[5]
post_game_menu(data)
# remove losses from user, increase loss stat
def loss(data):
print("You Lose!")
data[3] += 1
data[1] -= data[5]
post_game_menu(data)
# increase tie stat
def tie(data):
print("It's a Tie!")
data[4] += 1
post_game_menu(data)
def load_stats():
# get name to load file
name = input("What is your name? ")
try:
# try reading file of given name
data = [name,0,0,0,0,0]
f = open(name+".usr","r")
data[0] = f.readline()
data[1] = f.readline()
data[2] = f.readline()
data[3] = f.readline()
data[4] = f.readline()
f.close()
except Exception as e:
print("data unable to be loaded!")
print(e)
menu()
else:
data[0]=data[0].strip("\n")
data[1]=int(data[1])
data[2]=int(data[2])
data[3]=int(data[3])
data[4]=int(data[4])
print("Welcome back %s, let's play!" % data[0])
play_a_game(data)
def save_stats(data):
try:
# try writing data of user to file
f = open(data[0]+".usr","w")
f.write(data[0]+"\n")
f.write(str(data[1])+"\n")
f.write(str(data[2])+"\n")
f.write(str(data[3])+"\n")
f.write(str(data[4])+"\n")
f.close()
except Exception as e:
print("Sorry" + data[0] + ", your data was not able to be saved!")
print(e)
else:
print(data[0] +", your data has been saved!")
def stats(data):
print("\n"+ data[0]+", here are your game play statistics...")
print("Chips: %d" % data[1])
print("Wins: %d" % data[2])
print("Losses: %d" % data[3])
print("Ties: %d" % data[4])
try:
ratio = data[2] / data[3]
except ZeroDivisionError:
print("Win/loss ratio: INFINITE")
else:
print("Win/loss ratio: %.3f" % ratio)
post_game_menu(data)
def post_game_menu(data):
# To prevent being stuck with no chips, players are given 100 more in case they lose all chips to continue play
if data[1] == 0:
print("You've run out of chips!\nHere is another 100 to get you going!\nGood Luck!")
data[1] = 100
print("\n1. Play again")
print("2. View statistics")
print("3. Quit")
choice = input("\nEnter choice: ")
if choice == '1':
play_a_game(data)
elif choice == '2':
stats(data)
elif choice == '3':
save_stats(data)
print("Bye!")
sys.exit(0)
else:
print("Invalid choice!")
post_game_menu(data)
def menu():
print("Let's Play Blackjack!\n")
print("1. Start a new player")
print("2. Load a player")
print("3. Quit")
choices =["1", "2", "3"]
choice = input("Enter choice: ")
while choice not in choices:
print(choice + " is not a valid choice!")
choice = input("Enter a valid choice [1-3]: ")
if choice == '1':
name = input("\nWhat is your name? ")
print("Hello "+name+". Let's play!")
data = [name,100,0,0,0,0]
print("You will start with "+str(data[1])+ " chips")
play_a_game(data)
if choice == '2':
load_stats()
if choice == '3':
print("Bye!")
sys.exit(0)
menu()
|
[
"#nst9fk\r\nimport random\r\nimport math\r\nimport Card\r\nimport sys\r\n\r\ndef play_a_game(data):\r\n hand_number = data[2] + data[3] + data[4] + 1\r\n print(\"\\n\\nHand %d:\" % hand_number)\r\n\r\n # Take bet\r\n bet = -1\r\n while bet > data[1] or bet <= 0:\r\n try:\r\n print(\"You have %d chips.\" % data[1])\r\n bet = int(input(\"How many chips to bet for this hand? \"))\r\n except ValueError:\r\n print(\"Please enter a valid number of chips to bet!\")\r\n bet = -1\r\n else:\r\n if bet>data[1]:\r\n print(\"You cannot bet more chips than you have!\")\r\n elif bet==0:\r\n print(\"You cannot bet zero chips!\")\r\n elif bet<0:\r\n print(\"You cannot bet negative chips!\")\r\n data[5] = bet\r\n # Deal Player Cards\r\n print(\"Dealing Cards...\\n\")\r\n number_aces=0\r\n total=0\r\n # check for aces\r\n new_card1 = Card.Card()\r\n if new_card1.get_value() == 11:\r\n number_aces = 1\r\n new_card2 = Card.Card()\r\n if new_card2.get_value() == 11:\r\n if number_aces == 1:\r\n number_aces = 1\r\n total = new_card1.get_value() + new_card2.get_value()\r\n # if you draw two aces, total goes to 12\r\n if total == 22:\r\n total = 12\r\n hand = [new_card1,new_card2]\r\n\r\n # Deal Dealer's Cards\r\n dealer_aces=0\r\n dealer_total = 0\r\n dealer_card1 = Card.Card()\r\n if dealer_card1.get_value() == 11:\r\n dealer_aces = 1\r\n dealer_card2 = Card.Card()\r\n if dealer_card2.get_value() == 11:\r\n dealer_aces = 1\r\n dealer_total = dealer_card1.get_value() + dealer_card2.get_value()\r\n # if you draw two aces, total goes to 12\r\n if dealer_total == 22:\r\n dealer_total = 12\r\n dealer_hand = [dealer_card1,dealer_card2]\r\n\r\n # print hands\r\n print(\"Your hand:\")\r\n for x in hand:\r\n print(x)\r\n print(\"\\nDealer's shown card:\")\r\n print(dealer_hand[0])\r\n\r\n # check for blackjacks\r\n if total == 21:\r\n print(\"\\nBLACKJACK!\")\r\n data[5] = math.floor(data[5]*1.5)\r\n win(data)\r\n elif dealer_total == 21:\r\n print(dealer_hand[1])\r\n print(\"\\nDealer has Blackjack!\")\r\n loss(data)\r\n\r\n \r\n # player hit or stand\r\n while True:\r\n choice = input(\"\\nEnter '1' to hit, anything else to stand: \")\r\n if choice == \"1\":\r\n # deal card\r\n new_card = Card.Card()\r\n print(\"Card Drawn: \" + str(new_card))\r\n if new_card.get_value() == 11:\r\n number_aces +=1\r\n total += new_card.get_value()\r\n # player busts, check for aces\r\n if total > 21:\r\n if number_aces > 0:\r\n total -= 10\r\n number_aces -= 1\r\n else:\r\n print(\"BUST\")\r\n print(\"Your total: %d\" % total)\r\n loss(data)\r\n # player did not bust, add card to hand\r\n hand.append(new_card)\r\n print(\"\\nYour hand:\")\r\n for x in hand:\r\n print(x)\r\n print(\"\\nDealer's shown card:\")\r\n print(dealer_hand[0])\r\n # repeat until user does not hit\r\n else:\r\n break\r\n\r\n # User has stood\r\n # Show dealer hand\r\n print(\"\\nDealer's hand:\")\r\n for x in dealer_hand:\r\n print(x)\r\n # if the dealer is at or above 17, do not print totals\r\n # print is covered after the while loop\r\n if dealer_total >= 17:\r\n print(\"\\nDealer total: %d\" % dealer_total)\r\n print(\"Your total: %d\" % total)\r\n # dealer already has a better hand\r\n if dealer_total>total:\r\n loss(data)\r\n # dealers hand is not better\r\n while dealer_total<17 and dealer_total<total:\r\n # deal card\r\n new_card = Card.Card()\r\n print(\"\\nCard Drawn: \" + str(new_card))\r\n if new_card.get_value() == 11:\r\n dealer_aces +=1\r\n dealer_total += new_card.get_value()\r\n # dealer busts, check for aces\r\n if dealer_total > 21:\r\n if dealer_aces > 0:\r\n dealer_total -= 10\r\n dealer_aces -= 1\r\n else:\r\n print(\"New Dealer Total: %d\" % dealer_total)\r\n print(\"DEALER BUST\\n\")\r\n win(data)\r\n print(\"New Dealer Total: %d\\n\" % dealer_total)\r\n # dealer did not bust, add card to hand\r\n dealer_hand.append(new_card)\r\n \r\n # repeat until dealer hits or passes 17 or wins\r\n # if no-one busted, determine outcome\r\n if(dealer_total<22 and total<22):\r\n print(\"Dealer total: %d\" % dealer_total)\r\n print(\"Your total: %d\" % total)\r\n if dealer_total > total:\r\n loss(data)\r\n elif dealer_total < total:\r\n win(data)\r\n else:\r\n tie(data)\r\n\r\n# add winnings to user, increase win stat\r\ndef win(data):\r\n print(\"You Win!\")\r\n data[2] += 1\r\n data[1] += data[5]\r\n post_game_menu(data)\r\n\r\n# remove losses from user, increase loss stat\r\ndef loss(data):\r\n print(\"You Lose!\")\r\n data[3] += 1\r\n data[1] -= data[5]\r\n post_game_menu(data)\r\n\r\n# increase tie stat\r\ndef tie(data):\r\n print(\"It's a Tie!\")\r\n data[4] += 1\r\n post_game_menu(data)\r\n\r\n\r\ndef load_stats():\r\n # get name to load file\r\n name = input(\"What is your name? \")\r\n try:\r\n # try reading file of given name\r\n data = [name,0,0,0,0,0]\r\n f = open(name+\".usr\",\"r\")\r\n data[0] = f.readline()\r\n data[1] = f.readline()\r\n data[2] = f.readline()\r\n data[3] = f.readline()\r\n data[4] = f.readline()\r\n f.close()\r\n except Exception as e:\r\n print(\"data unable to be loaded!\")\r\n print(e)\r\n menu()\r\n else:\r\n data[0]=data[0].strip(\"\\n\")\r\n data[1]=int(data[1])\r\n data[2]=int(data[2])\r\n data[3]=int(data[3])\r\n data[4]=int(data[4])\r\n print(\"Welcome back %s, let's play!\" % data[0])\r\n play_a_game(data)\r\n\r\ndef save_stats(data):\r\n try:\r\n # try writing data of user to file\r\n f = open(data[0]+\".usr\",\"w\")\r\n f.write(data[0]+\"\\n\")\r\n f.write(str(data[1])+\"\\n\")\r\n f.write(str(data[2])+\"\\n\")\r\n f.write(str(data[3])+\"\\n\")\r\n f.write(str(data[4])+\"\\n\")\r\n f.close()\r\n except Exception as e:\r\n print(\"Sorry\" + data[0] + \", your data was not able to be saved!\")\r\n print(e)\r\n else:\r\n print(data[0] +\", your data has been saved!\")\r\n\r\ndef stats(data):\r\n print(\"\\n\"+ data[0]+\", here are your game play statistics...\")\r\n print(\"Chips: %d\" % data[1])\r\n print(\"Wins: %d\" % data[2])\r\n print(\"Losses: %d\" % data[3])\r\n print(\"Ties: %d\" % data[4])\r\n try:\r\n ratio = data[2] / data[3]\r\n except ZeroDivisionError:\r\n print(\"Win/loss ratio: INFINITE\")\r\n else:\r\n print(\"Win/loss ratio: %.3f\" % ratio)\r\n post_game_menu(data)\r\n \r\ndef post_game_menu(data):\r\n # To prevent being stuck with no chips, players are given 100 more in case they lose all chips to continue play\r\n if data[1] == 0:\r\n print(\"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\")\r\n data[1] = 100\r\n print(\"\\n1. Play again\")\r\n print(\"2. View statistics\")\r\n print(\"3. Quit\")\r\n choice = input(\"\\nEnter choice: \")\r\n if choice == '1':\r\n play_a_game(data)\r\n elif choice == '2':\r\n stats(data)\r\n elif choice == '3':\r\n save_stats(data)\r\n print(\"Bye!\")\r\n sys.exit(0)\r\n else:\r\n print(\"Invalid choice!\")\r\n post_game_menu(data)\r\n \r\ndef menu():\r\n print(\"Let's Play Blackjack!\\n\")\r\n print(\"1. Start a new player\")\r\n print(\"2. Load a player\")\r\n print(\"3. Quit\")\r\n choices =[\"1\", \"2\", \"3\"]\r\n choice = input(\"Enter choice: \")\r\n\r\n while choice not in choices:\r\n print(choice + \" is not a valid choice!\")\r\n choice = input(\"Enter a valid choice [1-3]: \")\r\n \r\n if choice == '1':\r\n name = input(\"\\nWhat is your name? \")\r\n print(\"Hello \"+name+\". Let's play!\")\r\n data = [name,100,0,0,0,0]\r\n print(\"You will start with \"+str(data[1])+ \" chips\")\r\n play_a_game(data)\r\n if choice == '2':\r\n load_stats()\r\n if choice == '3':\r\n print(\"Bye!\")\r\n sys.exit(0)\r\n \r\n\r\nmenu()\r\n",
"import random\nimport math\nimport Card\nimport sys\n\n\ndef play_a_game(data):\n hand_number = data[2] + data[3] + data[4] + 1\n print('\\n\\nHand %d:' % hand_number)\n bet = -1\n while bet > data[1] or bet <= 0:\n try:\n print('You have %d chips.' % data[1])\n bet = int(input('How many chips to bet for this hand? '))\n except ValueError:\n print('Please enter a valid number of chips to bet!')\n bet = -1\n else:\n if bet > data[1]:\n print('You cannot bet more chips than you have!')\n elif bet == 0:\n print('You cannot bet zero chips!')\n elif bet < 0:\n print('You cannot bet negative chips!')\n data[5] = bet\n print('Dealing Cards...\\n')\n number_aces = 0\n total = 0\n new_card1 = Card.Card()\n if new_card1.get_value() == 11:\n number_aces = 1\n new_card2 = Card.Card()\n if new_card2.get_value() == 11:\n if number_aces == 1:\n number_aces = 1\n total = new_card1.get_value() + new_card2.get_value()\n if total == 22:\n total = 12\n hand = [new_card1, new_card2]\n dealer_aces = 0\n dealer_total = 0\n dealer_card1 = Card.Card()\n if dealer_card1.get_value() == 11:\n dealer_aces = 1\n dealer_card2 = Card.Card()\n if dealer_card2.get_value() == 11:\n dealer_aces = 1\n dealer_total = dealer_card1.get_value() + dealer_card2.get_value()\n if dealer_total == 22:\n dealer_total = 12\n dealer_hand = [dealer_card1, dealer_card2]\n print('Your hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n if total == 21:\n print('\\nBLACKJACK!')\n data[5] = math.floor(data[5] * 1.5)\n win(data)\n elif dealer_total == 21:\n print(dealer_hand[1])\n print('\\nDealer has Blackjack!')\n loss(data)\n while True:\n choice = input(\"\\nEnter '1' to hit, anything else to stand: \")\n if choice == '1':\n new_card = Card.Card()\n print('Card Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n number_aces += 1\n total += new_card.get_value()\n if total > 21:\n if number_aces > 0:\n total -= 10\n number_aces -= 1\n else:\n print('BUST')\n print('Your total: %d' % total)\n loss(data)\n hand.append(new_card)\n print('\\nYour hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n else:\n break\n print(\"\\nDealer's hand:\")\n for x in dealer_hand:\n print(x)\n if dealer_total >= 17:\n print('\\nDealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n while dealer_total < 17 and dealer_total < total:\n new_card = Card.Card()\n print('\\nCard Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n dealer_aces += 1\n dealer_total += new_card.get_value()\n if dealer_total > 21:\n if dealer_aces > 0:\n dealer_total -= 10\n dealer_aces -= 1\n else:\n print('New Dealer Total: %d' % dealer_total)\n print('DEALER BUST\\n')\n win(data)\n print('New Dealer Total: %d\\n' % dealer_total)\n dealer_hand.append(new_card)\n if dealer_total < 22 and total < 22:\n print('Dealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n elif dealer_total < total:\n win(data)\n else:\n tie(data)\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\ndef loss(data):\n print('You Lose!')\n data[3] += 1\n data[1] -= data[5]\n post_game_menu(data)\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\ndef load_stats():\n name = input('What is your name? ')\n try:\n data = [name, 0, 0, 0, 0, 0]\n f = open(name + '.usr', 'r')\n data[0] = f.readline()\n data[1] = f.readline()\n data[2] = f.readline()\n data[3] = f.readline()\n data[4] = f.readline()\n f.close()\n except Exception as e:\n print('data unable to be loaded!')\n print(e)\n menu()\n else:\n data[0] = data[0].strip('\\n')\n data[1] = int(data[1])\n data[2] = int(data[2])\n data[3] = int(data[3])\n data[4] = int(data[4])\n print(\"Welcome back %s, let's play!\" % data[0])\n play_a_game(data)\n\n\ndef save_stats(data):\n try:\n f = open(data[0] + '.usr', 'w')\n f.write(data[0] + '\\n')\n f.write(str(data[1]) + '\\n')\n f.write(str(data[2]) + '\\n')\n f.write(str(data[3]) + '\\n')\n f.write(str(data[4]) + '\\n')\n f.close()\n except Exception as e:\n print('Sorry' + data[0] + ', your data was not able to be saved!')\n print(e)\n else:\n print(data[0] + ', your data has been saved!')\n\n\ndef stats(data):\n print('\\n' + data[0] + ', here are your game play statistics...')\n print('Chips: %d' % data[1])\n print('Wins: %d' % data[2])\n print('Losses: %d' % data[3])\n print('Ties: %d' % data[4])\n try:\n ratio = data[2] / data[3]\n except ZeroDivisionError:\n print('Win/loss ratio: INFINITE')\n else:\n print('Win/loss ratio: %.3f' % ratio)\n post_game_menu(data)\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\nmenu()\n",
"<import token>\n\n\ndef play_a_game(data):\n hand_number = data[2] + data[3] + data[4] + 1\n print('\\n\\nHand %d:' % hand_number)\n bet = -1\n while bet > data[1] or bet <= 0:\n try:\n print('You have %d chips.' % data[1])\n bet = int(input('How many chips to bet for this hand? '))\n except ValueError:\n print('Please enter a valid number of chips to bet!')\n bet = -1\n else:\n if bet > data[1]:\n print('You cannot bet more chips than you have!')\n elif bet == 0:\n print('You cannot bet zero chips!')\n elif bet < 0:\n print('You cannot bet negative chips!')\n data[5] = bet\n print('Dealing Cards...\\n')\n number_aces = 0\n total = 0\n new_card1 = Card.Card()\n if new_card1.get_value() == 11:\n number_aces = 1\n new_card2 = Card.Card()\n if new_card2.get_value() == 11:\n if number_aces == 1:\n number_aces = 1\n total = new_card1.get_value() + new_card2.get_value()\n if total == 22:\n total = 12\n hand = [new_card1, new_card2]\n dealer_aces = 0\n dealer_total = 0\n dealer_card1 = Card.Card()\n if dealer_card1.get_value() == 11:\n dealer_aces = 1\n dealer_card2 = Card.Card()\n if dealer_card2.get_value() == 11:\n dealer_aces = 1\n dealer_total = dealer_card1.get_value() + dealer_card2.get_value()\n if dealer_total == 22:\n dealer_total = 12\n dealer_hand = [dealer_card1, dealer_card2]\n print('Your hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n if total == 21:\n print('\\nBLACKJACK!')\n data[5] = math.floor(data[5] * 1.5)\n win(data)\n elif dealer_total == 21:\n print(dealer_hand[1])\n print('\\nDealer has Blackjack!')\n loss(data)\n while True:\n choice = input(\"\\nEnter '1' to hit, anything else to stand: \")\n if choice == '1':\n new_card = Card.Card()\n print('Card Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n number_aces += 1\n total += new_card.get_value()\n if total > 21:\n if number_aces > 0:\n total -= 10\n number_aces -= 1\n else:\n print('BUST')\n print('Your total: %d' % total)\n loss(data)\n hand.append(new_card)\n print('\\nYour hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n else:\n break\n print(\"\\nDealer's hand:\")\n for x in dealer_hand:\n print(x)\n if dealer_total >= 17:\n print('\\nDealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n while dealer_total < 17 and dealer_total < total:\n new_card = Card.Card()\n print('\\nCard Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n dealer_aces += 1\n dealer_total += new_card.get_value()\n if dealer_total > 21:\n if dealer_aces > 0:\n dealer_total -= 10\n dealer_aces -= 1\n else:\n print('New Dealer Total: %d' % dealer_total)\n print('DEALER BUST\\n')\n win(data)\n print('New Dealer Total: %d\\n' % dealer_total)\n dealer_hand.append(new_card)\n if dealer_total < 22 and total < 22:\n print('Dealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n elif dealer_total < total:\n win(data)\n else:\n tie(data)\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\ndef loss(data):\n print('You Lose!')\n data[3] += 1\n data[1] -= data[5]\n post_game_menu(data)\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\ndef load_stats():\n name = input('What is your name? ')\n try:\n data = [name, 0, 0, 0, 0, 0]\n f = open(name + '.usr', 'r')\n data[0] = f.readline()\n data[1] = f.readline()\n data[2] = f.readline()\n data[3] = f.readline()\n data[4] = f.readline()\n f.close()\n except Exception as e:\n print('data unable to be loaded!')\n print(e)\n menu()\n else:\n data[0] = data[0].strip('\\n')\n data[1] = int(data[1])\n data[2] = int(data[2])\n data[3] = int(data[3])\n data[4] = int(data[4])\n print(\"Welcome back %s, let's play!\" % data[0])\n play_a_game(data)\n\n\ndef save_stats(data):\n try:\n f = open(data[0] + '.usr', 'w')\n f.write(data[0] + '\\n')\n f.write(str(data[1]) + '\\n')\n f.write(str(data[2]) + '\\n')\n f.write(str(data[3]) + '\\n')\n f.write(str(data[4]) + '\\n')\n f.close()\n except Exception as e:\n print('Sorry' + data[0] + ', your data was not able to be saved!')\n print(e)\n else:\n print(data[0] + ', your data has been saved!')\n\n\ndef stats(data):\n print('\\n' + data[0] + ', here are your game play statistics...')\n print('Chips: %d' % data[1])\n print('Wins: %d' % data[2])\n print('Losses: %d' % data[3])\n print('Ties: %d' % data[4])\n try:\n ratio = data[2] / data[3]\n except ZeroDivisionError:\n print('Win/loss ratio: INFINITE')\n else:\n print('Win/loss ratio: %.3f' % ratio)\n post_game_menu(data)\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\nmenu()\n",
"<import token>\n\n\ndef play_a_game(data):\n hand_number = data[2] + data[3] + data[4] + 1\n print('\\n\\nHand %d:' % hand_number)\n bet = -1\n while bet > data[1] or bet <= 0:\n try:\n print('You have %d chips.' % data[1])\n bet = int(input('How many chips to bet for this hand? '))\n except ValueError:\n print('Please enter a valid number of chips to bet!')\n bet = -1\n else:\n if bet > data[1]:\n print('You cannot bet more chips than you have!')\n elif bet == 0:\n print('You cannot bet zero chips!')\n elif bet < 0:\n print('You cannot bet negative chips!')\n data[5] = bet\n print('Dealing Cards...\\n')\n number_aces = 0\n total = 0\n new_card1 = Card.Card()\n if new_card1.get_value() == 11:\n number_aces = 1\n new_card2 = Card.Card()\n if new_card2.get_value() == 11:\n if number_aces == 1:\n number_aces = 1\n total = new_card1.get_value() + new_card2.get_value()\n if total == 22:\n total = 12\n hand = [new_card1, new_card2]\n dealer_aces = 0\n dealer_total = 0\n dealer_card1 = Card.Card()\n if dealer_card1.get_value() == 11:\n dealer_aces = 1\n dealer_card2 = Card.Card()\n if dealer_card2.get_value() == 11:\n dealer_aces = 1\n dealer_total = dealer_card1.get_value() + dealer_card2.get_value()\n if dealer_total == 22:\n dealer_total = 12\n dealer_hand = [dealer_card1, dealer_card2]\n print('Your hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n if total == 21:\n print('\\nBLACKJACK!')\n data[5] = math.floor(data[5] * 1.5)\n win(data)\n elif dealer_total == 21:\n print(dealer_hand[1])\n print('\\nDealer has Blackjack!')\n loss(data)\n while True:\n choice = input(\"\\nEnter '1' to hit, anything else to stand: \")\n if choice == '1':\n new_card = Card.Card()\n print('Card Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n number_aces += 1\n total += new_card.get_value()\n if total > 21:\n if number_aces > 0:\n total -= 10\n number_aces -= 1\n else:\n print('BUST')\n print('Your total: %d' % total)\n loss(data)\n hand.append(new_card)\n print('\\nYour hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n else:\n break\n print(\"\\nDealer's hand:\")\n for x in dealer_hand:\n print(x)\n if dealer_total >= 17:\n print('\\nDealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n while dealer_total < 17 and dealer_total < total:\n new_card = Card.Card()\n print('\\nCard Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n dealer_aces += 1\n dealer_total += new_card.get_value()\n if dealer_total > 21:\n if dealer_aces > 0:\n dealer_total -= 10\n dealer_aces -= 1\n else:\n print('New Dealer Total: %d' % dealer_total)\n print('DEALER BUST\\n')\n win(data)\n print('New Dealer Total: %d\\n' % dealer_total)\n dealer_hand.append(new_card)\n if dealer_total < 22 and total < 22:\n print('Dealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n elif dealer_total < total:\n win(data)\n else:\n tie(data)\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\ndef loss(data):\n print('You Lose!')\n data[3] += 1\n data[1] -= data[5]\n post_game_menu(data)\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\ndef load_stats():\n name = input('What is your name? ')\n try:\n data = [name, 0, 0, 0, 0, 0]\n f = open(name + '.usr', 'r')\n data[0] = f.readline()\n data[1] = f.readline()\n data[2] = f.readline()\n data[3] = f.readline()\n data[4] = f.readline()\n f.close()\n except Exception as e:\n print('data unable to be loaded!')\n print(e)\n menu()\n else:\n data[0] = data[0].strip('\\n')\n data[1] = int(data[1])\n data[2] = int(data[2])\n data[3] = int(data[3])\n data[4] = int(data[4])\n print(\"Welcome back %s, let's play!\" % data[0])\n play_a_game(data)\n\n\ndef save_stats(data):\n try:\n f = open(data[0] + '.usr', 'w')\n f.write(data[0] + '\\n')\n f.write(str(data[1]) + '\\n')\n f.write(str(data[2]) + '\\n')\n f.write(str(data[3]) + '\\n')\n f.write(str(data[4]) + '\\n')\n f.close()\n except Exception as e:\n print('Sorry' + data[0] + ', your data was not able to be saved!')\n print(e)\n else:\n print(data[0] + ', your data has been saved!')\n\n\ndef stats(data):\n print('\\n' + data[0] + ', here are your game play statistics...')\n print('Chips: %d' % data[1])\n print('Wins: %d' % data[2])\n print('Losses: %d' % data[3])\n print('Ties: %d' % data[4])\n try:\n ratio = data[2] / data[3]\n except ZeroDivisionError:\n print('Win/loss ratio: INFINITE')\n else:\n print('Win/loss ratio: %.3f' % ratio)\n post_game_menu(data)\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n\n\ndef play_a_game(data):\n hand_number = data[2] + data[3] + data[4] + 1\n print('\\n\\nHand %d:' % hand_number)\n bet = -1\n while bet > data[1] or bet <= 0:\n try:\n print('You have %d chips.' % data[1])\n bet = int(input('How many chips to bet for this hand? '))\n except ValueError:\n print('Please enter a valid number of chips to bet!')\n bet = -1\n else:\n if bet > data[1]:\n print('You cannot bet more chips than you have!')\n elif bet == 0:\n print('You cannot bet zero chips!')\n elif bet < 0:\n print('You cannot bet negative chips!')\n data[5] = bet\n print('Dealing Cards...\\n')\n number_aces = 0\n total = 0\n new_card1 = Card.Card()\n if new_card1.get_value() == 11:\n number_aces = 1\n new_card2 = Card.Card()\n if new_card2.get_value() == 11:\n if number_aces == 1:\n number_aces = 1\n total = new_card1.get_value() + new_card2.get_value()\n if total == 22:\n total = 12\n hand = [new_card1, new_card2]\n dealer_aces = 0\n dealer_total = 0\n dealer_card1 = Card.Card()\n if dealer_card1.get_value() == 11:\n dealer_aces = 1\n dealer_card2 = Card.Card()\n if dealer_card2.get_value() == 11:\n dealer_aces = 1\n dealer_total = dealer_card1.get_value() + dealer_card2.get_value()\n if dealer_total == 22:\n dealer_total = 12\n dealer_hand = [dealer_card1, dealer_card2]\n print('Your hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n if total == 21:\n print('\\nBLACKJACK!')\n data[5] = math.floor(data[5] * 1.5)\n win(data)\n elif dealer_total == 21:\n print(dealer_hand[1])\n print('\\nDealer has Blackjack!')\n loss(data)\n while True:\n choice = input(\"\\nEnter '1' to hit, anything else to stand: \")\n if choice == '1':\n new_card = Card.Card()\n print('Card Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n number_aces += 1\n total += new_card.get_value()\n if total > 21:\n if number_aces > 0:\n total -= 10\n number_aces -= 1\n else:\n print('BUST')\n print('Your total: %d' % total)\n loss(data)\n hand.append(new_card)\n print('\\nYour hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n else:\n break\n print(\"\\nDealer's hand:\")\n for x in dealer_hand:\n print(x)\n if dealer_total >= 17:\n print('\\nDealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n while dealer_total < 17 and dealer_total < total:\n new_card = Card.Card()\n print('\\nCard Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n dealer_aces += 1\n dealer_total += new_card.get_value()\n if dealer_total > 21:\n if dealer_aces > 0:\n dealer_total -= 10\n dealer_aces -= 1\n else:\n print('New Dealer Total: %d' % dealer_total)\n print('DEALER BUST\\n')\n win(data)\n print('New Dealer Total: %d\\n' % dealer_total)\n dealer_hand.append(new_card)\n if dealer_total < 22 and total < 22:\n print('Dealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n elif dealer_total < total:\n win(data)\n else:\n tie(data)\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\ndef loss(data):\n print('You Lose!')\n data[3] += 1\n data[1] -= data[5]\n post_game_menu(data)\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\ndef load_stats():\n name = input('What is your name? ')\n try:\n data = [name, 0, 0, 0, 0, 0]\n f = open(name + '.usr', 'r')\n data[0] = f.readline()\n data[1] = f.readline()\n data[2] = f.readline()\n data[3] = f.readline()\n data[4] = f.readline()\n f.close()\n except Exception as e:\n print('data unable to be loaded!')\n print(e)\n menu()\n else:\n data[0] = data[0].strip('\\n')\n data[1] = int(data[1])\n data[2] = int(data[2])\n data[3] = int(data[3])\n data[4] = int(data[4])\n print(\"Welcome back %s, let's play!\" % data[0])\n play_a_game(data)\n\n\n<function token>\n\n\ndef stats(data):\n print('\\n' + data[0] + ', here are your game play statistics...')\n print('Chips: %d' % data[1])\n print('Wins: %d' % data[2])\n print('Losses: %d' % data[3])\n print('Ties: %d' % data[4])\n try:\n ratio = data[2] / data[3]\n except ZeroDivisionError:\n print('Win/loss ratio: INFINITE')\n else:\n print('Win/loss ratio: %.3f' % ratio)\n post_game_menu(data)\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n\n\ndef play_a_game(data):\n hand_number = data[2] + data[3] + data[4] + 1\n print('\\n\\nHand %d:' % hand_number)\n bet = -1\n while bet > data[1] or bet <= 0:\n try:\n print('You have %d chips.' % data[1])\n bet = int(input('How many chips to bet for this hand? '))\n except ValueError:\n print('Please enter a valid number of chips to bet!')\n bet = -1\n else:\n if bet > data[1]:\n print('You cannot bet more chips than you have!')\n elif bet == 0:\n print('You cannot bet zero chips!')\n elif bet < 0:\n print('You cannot bet negative chips!')\n data[5] = bet\n print('Dealing Cards...\\n')\n number_aces = 0\n total = 0\n new_card1 = Card.Card()\n if new_card1.get_value() == 11:\n number_aces = 1\n new_card2 = Card.Card()\n if new_card2.get_value() == 11:\n if number_aces == 1:\n number_aces = 1\n total = new_card1.get_value() + new_card2.get_value()\n if total == 22:\n total = 12\n hand = [new_card1, new_card2]\n dealer_aces = 0\n dealer_total = 0\n dealer_card1 = Card.Card()\n if dealer_card1.get_value() == 11:\n dealer_aces = 1\n dealer_card2 = Card.Card()\n if dealer_card2.get_value() == 11:\n dealer_aces = 1\n dealer_total = dealer_card1.get_value() + dealer_card2.get_value()\n if dealer_total == 22:\n dealer_total = 12\n dealer_hand = [dealer_card1, dealer_card2]\n print('Your hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n if total == 21:\n print('\\nBLACKJACK!')\n data[5] = math.floor(data[5] * 1.5)\n win(data)\n elif dealer_total == 21:\n print(dealer_hand[1])\n print('\\nDealer has Blackjack!')\n loss(data)\n while True:\n choice = input(\"\\nEnter '1' to hit, anything else to stand: \")\n if choice == '1':\n new_card = Card.Card()\n print('Card Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n number_aces += 1\n total += new_card.get_value()\n if total > 21:\n if number_aces > 0:\n total -= 10\n number_aces -= 1\n else:\n print('BUST')\n print('Your total: %d' % total)\n loss(data)\n hand.append(new_card)\n print('\\nYour hand:')\n for x in hand:\n print(x)\n print(\"\\nDealer's shown card:\")\n print(dealer_hand[0])\n else:\n break\n print(\"\\nDealer's hand:\")\n for x in dealer_hand:\n print(x)\n if dealer_total >= 17:\n print('\\nDealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n while dealer_total < 17 and dealer_total < total:\n new_card = Card.Card()\n print('\\nCard Drawn: ' + str(new_card))\n if new_card.get_value() == 11:\n dealer_aces += 1\n dealer_total += new_card.get_value()\n if dealer_total > 21:\n if dealer_aces > 0:\n dealer_total -= 10\n dealer_aces -= 1\n else:\n print('New Dealer Total: %d' % dealer_total)\n print('DEALER BUST\\n')\n win(data)\n print('New Dealer Total: %d\\n' % dealer_total)\n dealer_hand.append(new_card)\n if dealer_total < 22 and total < 22:\n print('Dealer total: %d' % dealer_total)\n print('Your total: %d' % total)\n if dealer_total > total:\n loss(data)\n elif dealer_total < total:\n win(data)\n else:\n tie(data)\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\ndef loss(data):\n print('You Lose!')\n data[3] += 1\n data[1] -= data[5]\n post_game_menu(data)\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\ndef load_stats():\n name = input('What is your name? ')\n try:\n data = [name, 0, 0, 0, 0, 0]\n f = open(name + '.usr', 'r')\n data[0] = f.readline()\n data[1] = f.readline()\n data[2] = f.readline()\n data[3] = f.readline()\n data[4] = f.readline()\n f.close()\n except Exception as e:\n print('data unable to be loaded!')\n print(e)\n menu()\n else:\n data[0] = data[0].strip('\\n')\n data[1] = int(data[1])\n data[2] = int(data[2])\n data[3] = int(data[3])\n data[4] = int(data[4])\n print(\"Welcome back %s, let's play!\" % data[0])\n play_a_game(data)\n\n\n<function token>\n<function token>\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\ndef loss(data):\n print('You Lose!')\n data[3] += 1\n data[1] -= data[5]\n post_game_menu(data)\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\ndef load_stats():\n name = input('What is your name? ')\n try:\n data = [name, 0, 0, 0, 0, 0]\n f = open(name + '.usr', 'r')\n data[0] = f.readline()\n data[1] = f.readline()\n data[2] = f.readline()\n data[3] = f.readline()\n data[4] = f.readline()\n f.close()\n except Exception as e:\n print('data unable to be loaded!')\n print(e)\n menu()\n else:\n data[0] = data[0].strip('\\n')\n data[1] = int(data[1])\n data[2] = int(data[2])\n data[3] = int(data[3])\n data[4] = int(data[4])\n print(\"Welcome back %s, let's play!\" % data[0])\n play_a_game(data)\n\n\n<function token>\n<function token>\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\n<function token>\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\ndef load_stats():\n name = input('What is your name? ')\n try:\n data = [name, 0, 0, 0, 0, 0]\n f = open(name + '.usr', 'r')\n data[0] = f.readline()\n data[1] = f.readline()\n data[2] = f.readline()\n data[3] = f.readline()\n data[4] = f.readline()\n f.close()\n except Exception as e:\n print('data unable to be loaded!')\n print(e)\n menu()\n else:\n data[0] = data[0].strip('\\n')\n data[1] = int(data[1])\n data[2] = int(data[2])\n data[3] = int(data[3])\n data[4] = int(data[4])\n print(\"Welcome back %s, let's play!\" % data[0])\n play_a_game(data)\n\n\n<function token>\n<function token>\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\n<function token>\n\n\ndef tie(data):\n print(\"It's a Tie!\")\n data[4] += 1\n post_game_menu(data)\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef post_game_menu(data):\n if data[1] == 0:\n print(\n \"You've run out of chips!\\nHere is another 100 to get you going!\\nGood Luck!\"\n )\n data[1] = 100\n print('\\n1. Play again')\n print('2. View statistics')\n print('3. Quit')\n choice = input('\\nEnter choice: ')\n if choice == '1':\n play_a_game(data)\n elif choice == '2':\n stats(data)\n elif choice == '3':\n save_stats(data)\n print('Bye!')\n sys.exit(0)\n else:\n print('Invalid choice!')\n post_game_menu(data)\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef menu():\n print(\"Let's Play Blackjack!\\n\")\n print('1. Start a new player')\n print('2. Load a player')\n print('3. Quit')\n choices = ['1', '2', '3']\n choice = input('Enter choice: ')\n while choice not in choices:\n print(choice + ' is not a valid choice!')\n choice = input('Enter a valid choice [1-3]: ')\n if choice == '1':\n name = input('\\nWhat is your name? ')\n print('Hello ' + name + \". Let's play!\")\n data = [name, 100, 0, 0, 0, 0]\n print('You will start with ' + str(data[1]) + ' chips')\n play_a_game(data)\n if choice == '2':\n load_stats()\n if choice == '3':\n print('Bye!')\n sys.exit(0)\n\n\n<code token>\n",
"<import token>\n<function token>\n\n\ndef win(data):\n print('You Win!')\n data[2] += 1\n data[1] += data[5]\n post_game_menu(data)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
99,056 |
2c8f36b4544dec8350e30d09c904273b940a42b3
|
#!/usr/bin/env python2
"""Test module for showing off modules
This should never be used in production
"""
"""Sedan class holds information about cars"""
class Sedan():
def __init__(self, numwheels):
self.numwheels = numwheels
def go_forward(self):
"""Prints 'going forward'"""
print "going forward"
def test():
"""Print out the word test
No arguments or returns
"""
print "hello"
|
[
"#!/usr/bin/env python2\n\n\"\"\"Test module for showing off modules\n\nThis should never be used in production\n\"\"\"\n\n\"\"\"Sedan class holds information about cars\"\"\"\nclass Sedan():\n def __init__(self, numwheels):\n self.numwheels = numwheels\n\n def go_forward(self):\n \"\"\"Prints 'going forward'\"\"\"\n print \"going forward\"\n\n\ndef test():\n \"\"\"Print out the word test\n\n No arguments or returns\n \"\"\"\n print \"hello\"\n\n"
] | true |
99,057 |
644d0622de99a27d4376ef8bbe0f12b093b618a9
|
import numpy as np
import pandas as pd
import plotly.express as px
import plotly.graph_objs as go
# import plotly.tools as tls
import matplotlib.pyplot as plt
from scipy.spatial import distance
from sklearn.utils.extmath import randomized_svd
from tqdm import tqdm
class kohonen:
"""
Matrix SOM
Initialize weight matrix
For epoch <- 1 to N do
Choose input matrix observation randomly - i
For k <- 1 to n_node do
compute d(input matrix i, weight matrix k)
end
Best Matching Unit = winning node = node with the smallest distance
For k <- 1 to n_node do
update weight matrix
end
end
Update weight mi(t + 1) = mi(t) + ⍺(t) * hci(t) [x(t) - mi(t)]
Neighborhood function hci(t) = h(dist(rc, ri), t)
rc, ri: location vectors of node c and i
if Gaussian:
hci(t) = exp(-dist^2 / (2 * σ^2(t)))
Radius: σ(t) = σ_0 * exp(-t / ƛ)
Learning rate: ⍺(t) = ⍺_0 * exp(-t / ƛ)
"""
def __init__(
self, data, xdim, ydim, topo = "rectangular", neighbor = "gaussian",
dist = "frobenius", decay = "exponential", seed = None
):
"""
:param data: 3d array. processed data set for Online SOM Detector
:param xdim: Number of x-grid
:param ydim: Number of y-grid
:param topo: Topology of output space - rectangular or hexagonal
:param neighbor: Neighborhood function - gaussian, bubble, or triangular
:param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or
:param decay: decaying learning rate and radius - exponential or linear
:param seed: Random seed
"""
np.random.seed(seed = seed)
if xdim is None or ydim is None:
xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))
ydim = xdim
self.net_dim = np.array([xdim, ydim])
self.ncol = data.shape[2]
self.nrow = data.shape[1]
# Initialize codebook matrix
self.init_weight()
# Topology
topo_types = ["rectangular", "hexagonal"]
if topo not in topo_types:
raise ValueError("Invalid topo. Expected one of: %s" % topo_types)
self.topo = topo
self.init_grid()
self.dist_node()
# Neighborhood function
neighbor_types = ["gaussian", "bubble", "triangular"]
if neighbor not in neighbor_types:
raise ValueError("Invalid neighbor. Expected one of: %s" % neighbor_types)
self.neighbor_func = neighbor
# Distance function
dist_type = ["frobenius", "nuclear", "mahalanobis", "eros"]
if dist not in dist_type:
raise ValueError("Invalid dist. Expected one of: %s" % dist_type)
self.dist_func = dist
# Decay
decay_types = ["exponential", "linear"]
if decay not in decay_types:
raise ValueError("Invalid decay. Expected one of: %s" % decay_types)
self.decay_func = decay
# som()
self.epoch = None
self.alpha = None
self.sigma = None
self.initial_learn = None
self.initial_r = None
# find_bmu()
self.bmu = None
# plot
self.reconstruction_error = None
self.dist_normal = None
self.project = None
def init_weight(self):
self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.nrow, self.ncol)
def init_grid(self):
"""
[row_pts, col_pts]
xdim x ydim rows (points)
[1,1]
[2,1]
[1,2]
[2,2]
2--------->
1--------->^
"""
self.pts = np.array(
np.meshgrid(
np.arange(self.net_dim[0]) + 1,
np.arange(self.net_dim[1]) + 1
)
).reshape(2, np.prod(self.net_dim)).T
if self.topo == "hexagonal":
self.pts[:, 0] = self.pts[:, 0] + .5 * (self.pts[:, 1] % 2)
self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]
def som(self, data, epoch = 100, init_rate = None, init_radius = None, keep_net = False):
"""
:param data: 3d array. processed data set for Online SOM Detector
:param epoch: epoch number
:param init_rate: initial learning rate
:param init_radius: initial radius of BMU neighborhood
:param keep_net: keep every weight matrix path?
"""
num_obs = data.shape[0]
obs_id = np.arange(num_obs)
chose_i = np.empty(1)
node_id = None
hci = None
self.epoch = epoch
if keep_net:
self.net_path = np.empty(
(self.epoch, self.net_dim[0] * self.net_dim[1], self.nrow, self.ncol)
)
# learning rate
if init_rate is None:
init_rate = .1
self.alpha = init_rate
self.initial_learn = init_rate
# radius of neighborhood
if init_radius is None:
init_radius = np.quantile(self.dci, q = 2 / 3, axis = None)
self.sigma = init_radius
self.initial_r = init_radius
# time constant (lambda)
rate_constant = epoch
radius_constant = epoch / np.log(self.sigma)
# distance between nodes
bmu_dist = self.dci[1, :]
rcst_err = np.empty(epoch)
for i in tqdm(range(epoch), desc = "epoch"):
chose_i = int(np.random.choice(obs_id, size = 1))
# BMU - self.bmu
self.find_bmu(data, chose_i)
# reconstruction error - sum of distances from BMU
rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu.astype(int))) for j in range(data.shape[0])])
bmu_dist = self.dci[self.bmu.astype(int), :].flatten()
# decay
self.sigma = self.decay(init_radius, i + 1, radius_constant)
self.alpha = self.decay(init_rate, i + 1, rate_constant)
# neighboring nodes (includes BMU)
neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()
for k in tqdm(range(neighbor_neuron.shape[0]), desc = "updating"):
node_id = neighbor_neuron[k]
hci = self.neighborhood(bmu_dist[node_id], self.sigma)
# update codebook matrices of neighboring nodes
self.net[node_id, :, :] += \
self.alpha * hci * \
(data[chose_i, :, :] - self.net[node_id, :, :]).reshape((self.nrow, self.ncol))
if keep_net:
self.net_path[i, :, :, :] = self.net
self.reconstruction_error = pd.DataFrame({"Epoch": np.arange(self.epoch) + 1, "Reconstruction Error": rcst_err})
def find_bmu(self, data, index):
"""
:param data: Processed data set for SOM.
:param index: Randomly chosen observation id for input matrix among 3d tensor set.
"""
dist_code = np.asarray([self.dist_mat(data, index, j) for j in range(self.net.shape[0])])
self.bmu = np.argmin(dist_code)
def dist_mat(self, data, index, node):
"""
:param data: Processed data set for SOM.
:param index: Randomly chosen observation id for input matrix among 3d tensor set.
:param node: node index
:return: distance between input matrix observation and weight matrix of the node
"""
if self.dist_func == "frobenius":
return np.linalg.norm(data[index, :, :] - self.net[node, :, :], "fro")
elif self.dist_func == "nuclear":
return np.linalg.norm(data[index, :, :] - self.net[node, :, :], "nuc")
elif self.dist_func == "mahalanobis":
x = data[index, :, :] - self.net[node, :, :]
covmat = np.cov(x, rowvar = False)
# spectral decomposition sigma = udu.T
w, v = np.linalg.eigh(covmat)
# inverse = ud^-1u.T
w[w == 0] += .0001
covinv = v.dot(np.diag(1 / w)).dot(v.T)
ss = x.dot(covinv).dot(x.T)
return np.sqrt(np.trace(ss))
elif self.dist_func == "eros":
x = data[index, :, :] - self.net[node, :, :]
covmat = np.cov(x, rowvar = False)
# svd(covariance)
u, s, vh = randomized_svd(covmat, n_components = covmat.shape[1], n_iter = 1, random_state = None)
# normalize eigenvalue
w = s / s.sum()
# distance
ss = np.multiply(vh, w).dot(vh.T)
return np.sqrt(np.trace(ss))
def dist_node(self):
"""
:return: distance matrix of SOM neuron
"""
if self.topo == "hexagonal":
self.dci = distance.cdist(self.pts, self.pts, "euclidean")
elif self.topo == "rectangular":
self.dci = distance.cdist(self.pts, self.pts, "chebyshev")
def decay(self, init, time, time_constant):
"""
:param init: initial value
:param time: t
:param time_constant: lambda
:return: decaying value of alpha or sigma
"""
if self.decay_func == "exponential":
return init * np.exp(-time / time_constant)
elif self.decay_func == "linear":
return init * (1 - time / time_constant)
def neighborhood(self, node_distance, radius):
"""
:param node_distance: Distance between SOM neurons
:param radius: Radius of BMU neighborhood
:return: Neighborhood function hci
"""
if self.neighbor_func == "gaussian":
return np.exp(-node_distance ** 2 / (2 * (radius ** 2)))
elif self.neighbor_func == "bubble":
if node_distance <= radius:
return 1.0
else:
return 0.0
elif self.neighbor_func == "triangular":
if node_distance <= radius:
return 1 - np.abs(node_distance) / radius
else:
return 0.0
def dist_weight(self, data, index):
"""
:param data: Processed data set for SOM
:param index: index for data
:return: minimum distance between input matrix and weight matrices, its node id (BMU)
"""
dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(range(self.net.shape[0]), desc = "bmu")])
return np.min(dist_wt), np.argmin(dist_wt)
def plot_error(self):
"""
:return: line plot of reconstruction error versus epoch
"""
fig = px.line(self.reconstruction_error, x = "Epoch", y = "Reconstruction Error")
fig.show()
def plot_heatmap(self, data):
"""
:return: Heatmap for SOM
"""
if self.project is None:
normal_distance = np.asarray(
[self.dist_weight(data, i) for i in tqdm(range(data.shape[0]), desc="mapping")]
)
self.dist_normal = normal_distance[:, 0]
self.project = normal_distance[:, 1]
x = self.project % self.net_dim[0]
y = self.project // self.net_dim[0]
if self.topo == "rectangular":
fig = go.Figure(
go.Histogram2d(
x = x,
y = y,
colorscale = "Viridis"
)
)
fig.show()
elif self.topo == "hexagonal":
x = x + .5 * (y % 2)
y = np.sqrt(3) / 2 * y
# plt_hex = plt.hexbin(x, y)
# plt.close()
# fig = tls.mpl_to_plotly(plt_hex)
plt.hexbin(x, y)
plt.show()
|
[
"import numpy as np\nimport pandas as pd\nimport plotly.express as px\nimport plotly.graph_objs as go\n# import plotly.tools as tls\nimport matplotlib.pyplot as plt\nfrom scipy.spatial import distance\nfrom sklearn.utils.extmath import randomized_svd\nfrom tqdm import tqdm\n\n\nclass kohonen:\n \"\"\"\n Matrix SOM\n Initialize weight matrix\n For epoch <- 1 to N do\n Choose input matrix observation randomly - i\n For k <- 1 to n_node do\n compute d(input matrix i, weight matrix k)\n end\n Best Matching Unit = winning node = node with the smallest distance\n For k <- 1 to n_node do\n update weight matrix\n end\n end\n\n Update weight mi(t + 1) = mi(t) + ⍺(t) * hci(t) [x(t) - mi(t)]\n Neighborhood function hci(t) = h(dist(rc, ri), t)\n rc, ri: location vectors of node c and i\n if Gaussian:\n hci(t) = exp(-dist^2 / (2 * σ^2(t)))\n Radius: σ(t) = σ_0 * exp(-t / ƛ)\n Learning rate: ⍺(t) = ⍺_0 * exp(-t / ƛ)\n \"\"\"\n\n def __init__(\n self, data, xdim, ydim, topo = \"rectangular\", neighbor = \"gaussian\",\n dist = \"frobenius\", decay = \"exponential\", seed = None\n ):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed = seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n # Initialize codebook matrix\n self.init_weight()\n # Topology\n topo_types = [\"rectangular\", \"hexagonal\"]\n if topo not in topo_types:\n raise ValueError(\"Invalid topo. Expected one of: %s\" % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n # Neighborhood function\n neighbor_types = [\"gaussian\", \"bubble\", \"triangular\"]\n if neighbor not in neighbor_types:\n raise ValueError(\"Invalid neighbor. Expected one of: %s\" % neighbor_types)\n self.neighbor_func = neighbor\n # Distance function\n dist_type = [\"frobenius\", \"nuclear\", \"mahalanobis\", \"eros\"]\n if dist not in dist_type:\n raise ValueError(\"Invalid dist. Expected one of: %s\" % dist_type)\n self.dist_func = dist\n # Decay\n decay_types = [\"exponential\", \"linear\"]\n if decay not in decay_types:\n raise ValueError(\"Invalid decay. Expected one of: %s\" % decay_types)\n self.decay_func = decay\n # som()\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n # find_bmu()\n self.bmu = None\n # plot\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n\n def init_weight(self):\n self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.nrow, self.ncol)\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(\n np.meshgrid(\n np.arange(self.net_dim[0]) + 1,\n np.arange(self.net_dim[1]) + 1\n )\n ).reshape(2, np.prod(self.net_dim)).T\n if self.topo == \"hexagonal\":\n self.pts[:, 0] = self.pts[:, 0] + .5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch = 100, init_rate = None, init_radius = None, keep_net = False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty(\n (self.epoch, self.net_dim[0] * self.net_dim[1], self.nrow, self.ncol)\n )\n # learning rate\n if init_rate is None:\n init_rate = .1\n self.alpha = init_rate\n self.initial_learn = init_rate\n # radius of neighborhood\n if init_radius is None:\n init_radius = np.quantile(self.dci, q = 2 / 3, axis = None)\n self.sigma = init_radius\n self.initial_r = init_radius\n # time constant (lambda)\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n # distance between nodes\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc = \"epoch\"):\n chose_i = int(np.random.choice(obs_id, size = 1))\n # BMU - self.bmu\n self.find_bmu(data, chose_i)\n # reconstruction error - sum of distances from BMU\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu.astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n # decay\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n # neighboring nodes (includes BMU)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc = \"updating\"):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n # update codebook matrices of neighboring nodes\n self.net[node_id, :, :] += \\\n self.alpha * hci * \\\n (data[chose_i, :, :] - self.net[node_id, :, :]).reshape((self.nrow, self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({\"Epoch\": np.arange(self.epoch) + 1, \"Reconstruction Error\": rcst_err})\n\n def find_bmu(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n \"\"\"\n dist_code = np.asarray([self.dist_mat(data, index, j) for j in range(self.net.shape[0])])\n self.bmu = np.argmin(dist_code)\n\n def dist_mat(self, data, index, node):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n :param node: node index\n :return: distance between input matrix observation and weight matrix of the node\n \"\"\"\n if self.dist_func == \"frobenius\":\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :], \"fro\")\n elif self.dist_func == \"nuclear\":\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :], \"nuc\")\n elif self.dist_func == \"mahalanobis\":\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar = False)\n # spectral decomposition sigma = udu.T\n w, v = np.linalg.eigh(covmat)\n # inverse = ud^-1u.T\n w[w == 0] += .0001\n covinv = v.dot(np.diag(1 / w)).dot(v.T)\n ss = x.dot(covinv).dot(x.T)\n return np.sqrt(np.trace(ss))\n elif self.dist_func == \"eros\":\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar = False)\n # svd(covariance)\n u, s, vh = randomized_svd(covmat, n_components = covmat.shape[1], n_iter = 1, random_state = None)\n # normalize eigenvalue\n w = s / s.sum()\n # distance\n ss = np.multiply(vh, w).dot(vh.T)\n return np.sqrt(np.trace(ss))\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == \"hexagonal\":\n self.dci = distance.cdist(self.pts, self.pts, \"euclidean\")\n elif self.topo == \"rectangular\":\n self.dci = distance.cdist(self.pts, self.pts, \"chebyshev\")\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == \"exponential\":\n return init * np.exp(-time / time_constant)\n elif self.decay_func == \"linear\":\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == \"gaussian\":\n return np.exp(-node_distance ** 2 / (2 * (radius ** 2)))\n elif self.neighbor_func == \"bubble\":\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == \"triangular\":\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(range(self.net.shape[0]), desc = \"bmu\")])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x = \"Epoch\", y = \"Reconstruction Error\")\n fig.show()\n\n def plot_heatmap(self, data):\n \"\"\"\n :return: Heatmap for SOM\n \"\"\"\n if self.project is None:\n normal_distance = np.asarray(\n [self.dist_weight(data, i) for i in tqdm(range(data.shape[0]), desc=\"mapping\")]\n )\n self.dist_normal = normal_distance[:, 0]\n self.project = normal_distance[:, 1]\n x = self.project % self.net_dim[0]\n y = self.project // self.net_dim[0]\n if self.topo == \"rectangular\":\n fig = go.Figure(\n go.Histogram2d(\n x = x,\n y = y,\n colorscale = \"Viridis\"\n )\n )\n fig.show()\n elif self.topo == \"hexagonal\":\n x = x + .5 * (y % 2)\n y = np.sqrt(3) / 2 * y\n # plt_hex = plt.hexbin(x, y)\n # plt.close()\n # fig = tls.mpl_to_plotly(plt_hex)\n plt.hexbin(x, y)\n plt.show()\n",
"import numpy as np\nimport pandas as pd\nimport plotly.express as px\nimport plotly.graph_objs as go\nimport matplotlib.pyplot as plt\nfrom scipy.spatial import distance\nfrom sklearn.utils.extmath import randomized_svd\nfrom tqdm import tqdm\n\n\nclass kohonen:\n \"\"\"\n Matrix SOM\n Initialize weight matrix\n For epoch <- 1 to N do\n Choose input matrix observation randomly - i\n For k <- 1 to n_node do\n compute d(input matrix i, weight matrix k)\n end\n Best Matching Unit = winning node = node with the smallest distance\n For k <- 1 to n_node do\n update weight matrix\n end\n end\n\n Update weight mi(t + 1) = mi(t) + ⍺(t) * hci(t) [x(t) - mi(t)]\n Neighborhood function hci(t) = h(dist(rc, ri), t)\n rc, ri: location vectors of node c and i\n if Gaussian:\n hci(t) = exp(-dist^2 / (2 * σ^2(t)))\n Radius: σ(t) = σ_0 * exp(-t / ƛ)\n Learning rate: ⍺(t) = ⍺_0 * exp(-t / ƛ)\n \"\"\"\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n\n def init_weight(self):\n self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.\n nrow, self.ncol)\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n\n def find_bmu(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n \"\"\"\n dist_code = np.asarray([self.dist_mat(data, index, j) for j in\n range(self.net.shape[0])])\n self.bmu = np.argmin(dist_code)\n\n def dist_mat(self, data, index, node):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n :param node: node index\n :return: distance between input matrix observation and weight matrix of the node\n \"\"\"\n if self.dist_func == 'frobenius':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'fro')\n elif self.dist_func == 'nuclear':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'nuc')\n elif self.dist_func == 'mahalanobis':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n w, v = np.linalg.eigh(covmat)\n w[w == 0] += 0.0001\n covinv = v.dot(np.diag(1 / w)).dot(v.T)\n ss = x.dot(covinv).dot(x.T)\n return np.sqrt(np.trace(ss))\n elif self.dist_func == 'eros':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n u, s, vh = randomized_svd(covmat, n_components=covmat.shape[1],\n n_iter=1, random_state=None)\n w = s / s.sum()\n ss = np.multiply(vh, w).dot(vh.T)\n return np.sqrt(np.trace(ss))\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == 'hexagonal':\n self.dci = distance.cdist(self.pts, self.pts, 'euclidean')\n elif self.topo == 'rectangular':\n self.dci = distance.cdist(self.pts, self.pts, 'chebyshev')\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n\n def plot_heatmap(self, data):\n \"\"\"\n :return: Heatmap for SOM\n \"\"\"\n if self.project is None:\n normal_distance = np.asarray([self.dist_weight(data, i) for i in\n tqdm(range(data.shape[0]), desc='mapping')])\n self.dist_normal = normal_distance[:, 0]\n self.project = normal_distance[:, 1]\n x = self.project % self.net_dim[0]\n y = self.project // self.net_dim[0]\n if self.topo == 'rectangular':\n fig = go.Figure(go.Histogram2d(x=x, y=y, colorscale='Viridis'))\n fig.show()\n elif self.topo == 'hexagonal':\n x = x + 0.5 * (y % 2)\n y = np.sqrt(3) / 2 * y\n plt.hexbin(x, y)\n plt.show()\n",
"<import token>\n\n\nclass kohonen:\n \"\"\"\n Matrix SOM\n Initialize weight matrix\n For epoch <- 1 to N do\n Choose input matrix observation randomly - i\n For k <- 1 to n_node do\n compute d(input matrix i, weight matrix k)\n end\n Best Matching Unit = winning node = node with the smallest distance\n For k <- 1 to n_node do\n update weight matrix\n end\n end\n\n Update weight mi(t + 1) = mi(t) + ⍺(t) * hci(t) [x(t) - mi(t)]\n Neighborhood function hci(t) = h(dist(rc, ri), t)\n rc, ri: location vectors of node c and i\n if Gaussian:\n hci(t) = exp(-dist^2 / (2 * σ^2(t)))\n Radius: σ(t) = σ_0 * exp(-t / ƛ)\n Learning rate: ⍺(t) = ⍺_0 * exp(-t / ƛ)\n \"\"\"\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n\n def init_weight(self):\n self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.\n nrow, self.ncol)\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n\n def find_bmu(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n \"\"\"\n dist_code = np.asarray([self.dist_mat(data, index, j) for j in\n range(self.net.shape[0])])\n self.bmu = np.argmin(dist_code)\n\n def dist_mat(self, data, index, node):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n :param node: node index\n :return: distance between input matrix observation and weight matrix of the node\n \"\"\"\n if self.dist_func == 'frobenius':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'fro')\n elif self.dist_func == 'nuclear':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'nuc')\n elif self.dist_func == 'mahalanobis':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n w, v = np.linalg.eigh(covmat)\n w[w == 0] += 0.0001\n covinv = v.dot(np.diag(1 / w)).dot(v.T)\n ss = x.dot(covinv).dot(x.T)\n return np.sqrt(np.trace(ss))\n elif self.dist_func == 'eros':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n u, s, vh = randomized_svd(covmat, n_components=covmat.shape[1],\n n_iter=1, random_state=None)\n w = s / s.sum()\n ss = np.multiply(vh, w).dot(vh.T)\n return np.sqrt(np.trace(ss))\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == 'hexagonal':\n self.dci = distance.cdist(self.pts, self.pts, 'euclidean')\n elif self.topo == 'rectangular':\n self.dci = distance.cdist(self.pts, self.pts, 'chebyshev')\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n\n def plot_heatmap(self, data):\n \"\"\"\n :return: Heatmap for SOM\n \"\"\"\n if self.project is None:\n normal_distance = np.asarray([self.dist_weight(data, i) for i in\n tqdm(range(data.shape[0]), desc='mapping')])\n self.dist_normal = normal_distance[:, 0]\n self.project = normal_distance[:, 1]\n x = self.project % self.net_dim[0]\n y = self.project // self.net_dim[0]\n if self.topo == 'rectangular':\n fig = go.Figure(go.Histogram2d(x=x, y=y, colorscale='Viridis'))\n fig.show()\n elif self.topo == 'hexagonal':\n x = x + 0.5 * (y % 2)\n y = np.sqrt(3) / 2 * y\n plt.hexbin(x, y)\n plt.show()\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n\n def init_weight(self):\n self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.\n nrow, self.ncol)\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n\n def find_bmu(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n \"\"\"\n dist_code = np.asarray([self.dist_mat(data, index, j) for j in\n range(self.net.shape[0])])\n self.bmu = np.argmin(dist_code)\n\n def dist_mat(self, data, index, node):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n :param node: node index\n :return: distance between input matrix observation and weight matrix of the node\n \"\"\"\n if self.dist_func == 'frobenius':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'fro')\n elif self.dist_func == 'nuclear':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'nuc')\n elif self.dist_func == 'mahalanobis':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n w, v = np.linalg.eigh(covmat)\n w[w == 0] += 0.0001\n covinv = v.dot(np.diag(1 / w)).dot(v.T)\n ss = x.dot(covinv).dot(x.T)\n return np.sqrt(np.trace(ss))\n elif self.dist_func == 'eros':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n u, s, vh = randomized_svd(covmat, n_components=covmat.shape[1],\n n_iter=1, random_state=None)\n w = s / s.sum()\n ss = np.multiply(vh, w).dot(vh.T)\n return np.sqrt(np.trace(ss))\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == 'hexagonal':\n self.dci = distance.cdist(self.pts, self.pts, 'euclidean')\n elif self.topo == 'rectangular':\n self.dci = distance.cdist(self.pts, self.pts, 'chebyshev')\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n\n def plot_heatmap(self, data):\n \"\"\"\n :return: Heatmap for SOM\n \"\"\"\n if self.project is None:\n normal_distance = np.asarray([self.dist_weight(data, i) for i in\n tqdm(range(data.shape[0]), desc='mapping')])\n self.dist_normal = normal_distance[:, 0]\n self.project = normal_distance[:, 1]\n x = self.project % self.net_dim[0]\n y = self.project // self.net_dim[0]\n if self.topo == 'rectangular':\n fig = go.Figure(go.Histogram2d(x=x, y=y, colorscale='Viridis'))\n fig.show()\n elif self.topo == 'hexagonal':\n x = x + 0.5 * (y % 2)\n y = np.sqrt(3) / 2 * y\n plt.hexbin(x, y)\n plt.show()\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n\n def init_weight(self):\n self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.\n nrow, self.ncol)\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n\n def dist_mat(self, data, index, node):\n \"\"\"\n :param data: Processed data set for SOM.\n :param index: Randomly chosen observation id for input matrix among 3d tensor set.\n :param node: node index\n :return: distance between input matrix observation and weight matrix of the node\n \"\"\"\n if self.dist_func == 'frobenius':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'fro')\n elif self.dist_func == 'nuclear':\n return np.linalg.norm(data[index, :, :] - self.net[node, :, :],\n 'nuc')\n elif self.dist_func == 'mahalanobis':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n w, v = np.linalg.eigh(covmat)\n w[w == 0] += 0.0001\n covinv = v.dot(np.diag(1 / w)).dot(v.T)\n ss = x.dot(covinv).dot(x.T)\n return np.sqrt(np.trace(ss))\n elif self.dist_func == 'eros':\n x = data[index, :, :] - self.net[node, :, :]\n covmat = np.cov(x, rowvar=False)\n u, s, vh = randomized_svd(covmat, n_components=covmat.shape[1],\n n_iter=1, random_state=None)\n w = s / s.sum()\n ss = np.multiply(vh, w).dot(vh.T)\n return np.sqrt(np.trace(ss))\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == 'hexagonal':\n self.dci = distance.cdist(self.pts, self.pts, 'euclidean')\n elif self.topo == 'rectangular':\n self.dci = distance.cdist(self.pts, self.pts, 'chebyshev')\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n\n def plot_heatmap(self, data):\n \"\"\"\n :return: Heatmap for SOM\n \"\"\"\n if self.project is None:\n normal_distance = np.asarray([self.dist_weight(data, i) for i in\n tqdm(range(data.shape[0]), desc='mapping')])\n self.dist_normal = normal_distance[:, 0]\n self.project = normal_distance[:, 1]\n x = self.project % self.net_dim[0]\n y = self.project // self.net_dim[0]\n if self.topo == 'rectangular':\n fig = go.Figure(go.Histogram2d(x=x, y=y, colorscale='Viridis'))\n fig.show()\n elif self.topo == 'hexagonal':\n x = x + 0.5 * (y % 2)\n y = np.sqrt(3) / 2 * y\n plt.hexbin(x, y)\n plt.show()\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n\n def init_weight(self):\n self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.\n nrow, self.ncol)\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == 'hexagonal':\n self.dci = distance.cdist(self.pts, self.pts, 'euclidean')\n elif self.topo == 'rectangular':\n self.dci = distance.cdist(self.pts, self.pts, 'chebyshev')\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n\n def plot_heatmap(self, data):\n \"\"\"\n :return: Heatmap for SOM\n \"\"\"\n if self.project is None:\n normal_distance = np.asarray([self.dist_weight(data, i) for i in\n tqdm(range(data.shape[0]), desc='mapping')])\n self.dist_normal = normal_distance[:, 0]\n self.project = normal_distance[:, 1]\n x = self.project % self.net_dim[0]\n y = self.project // self.net_dim[0]\n if self.topo == 'rectangular':\n fig = go.Figure(go.Histogram2d(x=x, y=y, colorscale='Viridis'))\n fig.show()\n elif self.topo == 'hexagonal':\n x = x + 0.5 * (y % 2)\n y = np.sqrt(3) / 2 * y\n plt.hexbin(x, y)\n plt.show()\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n\n def init_weight(self):\n self.net = np.random.rand(self.net_dim[0] * self.net_dim[1], self.\n nrow, self.ncol)\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == 'hexagonal':\n self.dci = distance.cdist(self.pts, self.pts, 'euclidean')\n elif self.topo == 'rectangular':\n self.dci = distance.cdist(self.pts, self.pts, 'chebyshev')\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n <function token>\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n\n def dist_node(self):\n \"\"\"\n :return: distance matrix of SOM neuron\n \"\"\"\n if self.topo == 'hexagonal':\n self.dci = distance.cdist(self.pts, self.pts, 'euclidean')\n elif self.topo == 'rectangular':\n self.dci = distance.cdist(self.pts, self.pts, 'chebyshev')\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n\n def __init__(self, data, xdim, ydim, topo='rectangular', neighbor=\n 'gaussian', dist='frobenius', decay='exponential', seed=None):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param xdim: Number of x-grid\n :param ydim: Number of y-grid\n :param topo: Topology of output space - rectangular or hexagonal\n :param neighbor: Neighborhood function - gaussian, bubble, or triangular\n :param dist: Distance function - frobenius, nuclear, mahalanobis (just form of mahalanobis), or\n :param decay: decaying learning rate and radius - exponential or linear\n :param seed: Random seed\n \"\"\"\n np.random.seed(seed=seed)\n if xdim is None or ydim is None:\n xdim = int(np.sqrt(5 * np.sqrt(data.shape[0])))\n ydim = xdim\n self.net_dim = np.array([xdim, ydim])\n self.ncol = data.shape[2]\n self.nrow = data.shape[1]\n self.init_weight()\n topo_types = ['rectangular', 'hexagonal']\n if topo not in topo_types:\n raise ValueError('Invalid topo. Expected one of: %s' % topo_types)\n self.topo = topo\n self.init_grid()\n self.dist_node()\n neighbor_types = ['gaussian', 'bubble', 'triangular']\n if neighbor not in neighbor_types:\n raise ValueError('Invalid neighbor. Expected one of: %s' %\n neighbor_types)\n self.neighbor_func = neighbor\n dist_type = ['frobenius', 'nuclear', 'mahalanobis', 'eros']\n if dist not in dist_type:\n raise ValueError('Invalid dist. Expected one of: %s' % dist_type)\n self.dist_func = dist\n decay_types = ['exponential', 'linear']\n if decay not in decay_types:\n raise ValueError('Invalid decay. Expected one of: %s' % decay_types\n )\n self.decay_func = decay\n self.epoch = None\n self.alpha = None\n self.sigma = None\n self.initial_learn = None\n self.initial_r = None\n self.bmu = None\n self.reconstruction_error = None\n self.dist_normal = None\n self.project = None\n <function token>\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n <function token>\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n <function token>\n <function token>\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n <function token>\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n\n def plot_error(self):\n \"\"\"\n :return: line plot of reconstruction error versus epoch\n \"\"\"\n fig = px.line(self.reconstruction_error, x='Epoch', y=\n 'Reconstruction Error')\n fig.show()\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n <function token>\n <function token>\n\n def init_grid(self):\n \"\"\"\n [row_pts, col_pts]\n xdim x ydim rows (points)\n [1,1]\n [2,1]\n [1,2]\n [2,2]\n 2--------->\n 1--------->^\n \"\"\"\n self.pts = np.array(np.meshgrid(np.arange(self.net_dim[0]) + 1, np.\n arange(self.net_dim[1]) + 1)).reshape(2, np.prod(self.net_dim)).T\n if self.topo == 'hexagonal':\n self.pts[:, 0] = self.pts[:, 0] + 0.5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n <function token>\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n <function token>\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n <function token>\n\n def decay(self, init, time, time_constant):\n \"\"\"\n :param init: initial value\n :param time: t\n :param time_constant: lambda\n :return: decaying value of alpha or sigma\n \"\"\"\n if self.decay_func == 'exponential':\n return init * np.exp(-time / time_constant)\n elif self.decay_func == 'linear':\n return init * (1 - time / time_constant)\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n <function token>\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n <function token>\n <function token>\n\n def neighborhood(self, node_distance, radius):\n \"\"\"\n :param node_distance: Distance between SOM neurons\n :param radius: Radius of BMU neighborhood\n :return: Neighborhood function hci\n \"\"\"\n if self.neighbor_func == 'gaussian':\n return np.exp(-node_distance ** 2 / (2 * radius ** 2))\n elif self.neighbor_func == 'bubble':\n if node_distance <= radius:\n return 1.0\n else:\n return 0.0\n elif self.neighbor_func == 'triangular':\n if node_distance <= radius:\n return 1 - np.abs(node_distance) / radius\n else:\n return 0.0\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n <function token>\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def som(self, data, epoch=100, init_rate=None, init_radius=None,\n keep_net=False):\n \"\"\"\n :param data: 3d array. processed data set for Online SOM Detector\n :param epoch: epoch number\n :param init_rate: initial learning rate\n :param init_radius: initial radius of BMU neighborhood\n :param keep_net: keep every weight matrix path?\n \"\"\"\n num_obs = data.shape[0]\n obs_id = np.arange(num_obs)\n chose_i = np.empty(1)\n node_id = None\n hci = None\n self.epoch = epoch\n if keep_net:\n self.net_path = np.empty((self.epoch, self.net_dim[0] * self.\n net_dim[1], self.nrow, self.ncol))\n if init_rate is None:\n init_rate = 0.1\n self.alpha = init_rate\n self.initial_learn = init_rate\n if init_radius is None:\n init_radius = np.quantile(self.dci, q=2 / 3, axis=None)\n self.sigma = init_radius\n self.initial_r = init_radius\n rate_constant = epoch\n radius_constant = epoch / np.log(self.sigma)\n bmu_dist = self.dci[1, :]\n rcst_err = np.empty(epoch)\n for i in tqdm(range(epoch), desc='epoch'):\n chose_i = int(np.random.choice(obs_id, size=1))\n self.find_bmu(data, chose_i)\n rcst_err[i] = np.sum([np.square(self.dist_mat(data, j, self.bmu\n .astype(int))) for j in range(data.shape[0])])\n bmu_dist = self.dci[self.bmu.astype(int), :].flatten()\n self.sigma = self.decay(init_radius, i + 1, radius_constant)\n self.alpha = self.decay(init_rate, i + 1, rate_constant)\n neighbor_neuron = np.argwhere(bmu_dist <= self.sigma).flatten()\n for k in tqdm(range(neighbor_neuron.shape[0]), desc='updating'):\n node_id = neighbor_neuron[k]\n hci = self.neighborhood(bmu_dist[node_id], self.sigma)\n self.net[node_id, :, :] += self.alpha * hci * (data[chose_i,\n :, :] - self.net[node_id, :, :]).reshape((self.nrow,\n self.ncol))\n if keep_net:\n self.net_path[i, :, :, :] = self.net\n self.reconstruction_error = pd.DataFrame({'Epoch': np.arange(self.\n epoch) + 1, 'Reconstruction Error': rcst_err})\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n <function token>\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def dist_weight(self, data, index):\n \"\"\"\n :param data: Processed data set for SOM\n :param index: index for data\n :return: minimum distance between input matrix and weight matrices, its node id (BMU)\n \"\"\"\n dist_wt = np.asarray([self.dist_mat(data, index, j) for j in tqdm(\n range(self.net.shape[0]), desc='bmu')])\n return np.min(dist_wt), np.argmin(dist_wt)\n <function token>\n <function token>\n",
"<import token>\n\n\nclass kohonen:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,058 |
1af7c0b42ca65a0926cf4f903393f4987f9346de
|
class Solution:
def allPathsSourceTarget(self, graph):
"""
:type graph: List[List[int]]
:rtype: List[List[int]]
"""
result_list = []
_ = self.dfs(graph, result_list, 0, [0])
return result_list
def dfs(self, graph, result_list, pos, path):
if not graph[pos]:
result_list.append(path)
return
for v in graph[pos]:
_ = self.dfs(graph, result_list, v, path + [v])
|
[
"class Solution:\n def allPathsSourceTarget(self, graph):\n \"\"\"\n :type graph: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n result_list = []\n \n _ = self.dfs(graph, result_list, 0, [0])\n \n return result_list\n \n def dfs(self, graph, result_list, pos, path):\n if not graph[pos]:\n result_list.append(path)\n return\n \n for v in graph[pos]:\n _ = self.dfs(graph, result_list, v, path + [v])",
"class Solution:\n\n def allPathsSourceTarget(self, graph):\n \"\"\"\n :type graph: List[List[int]]\n :rtype: List[List[int]]\n \"\"\"\n result_list = []\n _ = self.dfs(graph, result_list, 0, [0])\n return result_list\n\n def dfs(self, graph, result_list, pos, path):\n if not graph[pos]:\n result_list.append(path)\n return\n for v in graph[pos]:\n _ = self.dfs(graph, result_list, v, path + [v])\n",
"class Solution:\n <function token>\n\n def dfs(self, graph, result_list, pos, path):\n if not graph[pos]:\n result_list.append(path)\n return\n for v in graph[pos]:\n _ = self.dfs(graph, result_list, v, path + [v])\n",
"class Solution:\n <function token>\n <function token>\n",
"<class token>\n"
] | false |
99,059 |
c0027e8daa689c6898e83f0df320bc92b3d0a030
|
# coding:utf8
from PIL import Image
from PIL import ImageEnhance
from PIL import ImageFilter
import math
import os
import time
from xml.dom.minidom import parse
import xml.dom.minidom
DIR_ROOT = "assets\\atlas_ui"
# 使用minidom解析器打开 XML 文档
DOMTree = xml.dom.minidom.parse(DIR_ROOT + "\\package.xml")
movies = DOMTree.getElementsByTagName("image")
isSet = False
for data in movies:
dataName = data.getAttribute("name")
if dataName.find("@") != -1 and dataName.find("_") != -1 and not data.getAttribute("scale"):
isSet = True
print(dataName)
arr = dataName.split("@")[1].split(".")[0].split("_")
img = Image.open(DIR_ROOT + data.getAttribute("path") + dataName)
imgW = img.size[0]
imgH = img.size[1]
outData = [arr[0], arr[1]]
outData.append(str(imgW - int(arr[0]) - int(arr[2])))
outData.append(str(imgH - int(arr[1]) - int(arr[3])))
scale9grid = ",".join(outData)
print("scale9grid => " + scale9grid)
data.setAttribute('scale', "9grid") #设置attrib
data.setAttribute('scale9grid', scale9grid) #设置attrib
if isSet:
with open(DIR_ROOT + "\\package.xml", "w") as f:
DOMTree.writexml(f,indent='',addindent='',newl='',encoding='UTF-8')
|
[
"# coding:utf8\n\nfrom PIL import Image\nfrom PIL import ImageEnhance\nfrom PIL import ImageFilter\nimport math\nimport os\nimport time\n\nfrom xml.dom.minidom import parse\nimport xml.dom.minidom\n\nDIR_ROOT = \"assets\\\\atlas_ui\"\n# 使用minidom解析器打开 XML 文档\nDOMTree = xml.dom.minidom.parse(DIR_ROOT + \"\\\\package.xml\")\nmovies = DOMTree.getElementsByTagName(\"image\")\nisSet = False\nfor data in movies:\n dataName = data.getAttribute(\"name\")\n if dataName.find(\"@\") != -1 and dataName.find(\"_\") != -1 and not data.getAttribute(\"scale\"):\n isSet = True\n print(dataName)\n arr = dataName.split(\"@\")[1].split(\".\")[0].split(\"_\")\n img = Image.open(DIR_ROOT + data.getAttribute(\"path\") + dataName)\n imgW = img.size[0]\n imgH = img.size[1]\n outData = [arr[0], arr[1]]\n outData.append(str(imgW - int(arr[0]) - int(arr[2])))\n outData.append(str(imgH - int(arr[1]) - int(arr[3])))\n scale9grid = \",\".join(outData)\n print(\"scale9grid => \" + scale9grid)\n data.setAttribute('scale', \"9grid\") #设置attrib\n data.setAttribute('scale9grid', scale9grid) #设置attrib\nif isSet:\n with open(DIR_ROOT + \"\\\\package.xml\", \"w\") as f:\n DOMTree.writexml(f,indent='',addindent='',newl='',encoding='UTF-8')",
"from PIL import Image\nfrom PIL import ImageEnhance\nfrom PIL import ImageFilter\nimport math\nimport os\nimport time\nfrom xml.dom.minidom import parse\nimport xml.dom.minidom\nDIR_ROOT = 'assets\\\\atlas_ui'\nDOMTree = xml.dom.minidom.parse(DIR_ROOT + '\\\\package.xml')\nmovies = DOMTree.getElementsByTagName('image')\nisSet = False\nfor data in movies:\n dataName = data.getAttribute('name')\n if dataName.find('@') != -1 and dataName.find('_'\n ) != -1 and not data.getAttribute('scale'):\n isSet = True\n print(dataName)\n arr = dataName.split('@')[1].split('.')[0].split('_')\n img = Image.open(DIR_ROOT + data.getAttribute('path') + dataName)\n imgW = img.size[0]\n imgH = img.size[1]\n outData = [arr[0], arr[1]]\n outData.append(str(imgW - int(arr[0]) - int(arr[2])))\n outData.append(str(imgH - int(arr[1]) - int(arr[3])))\n scale9grid = ','.join(outData)\n print('scale9grid => ' + scale9grid)\n data.setAttribute('scale', '9grid')\n data.setAttribute('scale9grid', scale9grid)\nif isSet:\n with open(DIR_ROOT + '\\\\package.xml', 'w') as f:\n DOMTree.writexml(f, indent='', addindent='', newl='', encoding='UTF-8')\n",
"<import token>\nDIR_ROOT = 'assets\\\\atlas_ui'\nDOMTree = xml.dom.minidom.parse(DIR_ROOT + '\\\\package.xml')\nmovies = DOMTree.getElementsByTagName('image')\nisSet = False\nfor data in movies:\n dataName = data.getAttribute('name')\n if dataName.find('@') != -1 and dataName.find('_'\n ) != -1 and not data.getAttribute('scale'):\n isSet = True\n print(dataName)\n arr = dataName.split('@')[1].split('.')[0].split('_')\n img = Image.open(DIR_ROOT + data.getAttribute('path') + dataName)\n imgW = img.size[0]\n imgH = img.size[1]\n outData = [arr[0], arr[1]]\n outData.append(str(imgW - int(arr[0]) - int(arr[2])))\n outData.append(str(imgH - int(arr[1]) - int(arr[3])))\n scale9grid = ','.join(outData)\n print('scale9grid => ' + scale9grid)\n data.setAttribute('scale', '9grid')\n data.setAttribute('scale9grid', scale9grid)\nif isSet:\n with open(DIR_ROOT + '\\\\package.xml', 'w') as f:\n DOMTree.writexml(f, indent='', addindent='', newl='', encoding='UTF-8')\n",
"<import token>\n<assignment token>\nfor data in movies:\n dataName = data.getAttribute('name')\n if dataName.find('@') != -1 and dataName.find('_'\n ) != -1 and not data.getAttribute('scale'):\n isSet = True\n print(dataName)\n arr = dataName.split('@')[1].split('.')[0].split('_')\n img = Image.open(DIR_ROOT + data.getAttribute('path') + dataName)\n imgW = img.size[0]\n imgH = img.size[1]\n outData = [arr[0], arr[1]]\n outData.append(str(imgW - int(arr[0]) - int(arr[2])))\n outData.append(str(imgH - int(arr[1]) - int(arr[3])))\n scale9grid = ','.join(outData)\n print('scale9grid => ' + scale9grid)\n data.setAttribute('scale', '9grid')\n data.setAttribute('scale9grid', scale9grid)\nif isSet:\n with open(DIR_ROOT + '\\\\package.xml', 'w') as f:\n DOMTree.writexml(f, indent='', addindent='', newl='', encoding='UTF-8')\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
99,060 |
ae15754551d2c81532324da61efef68053a394a8
|
import numpy as np
import pylab as pl
file = open("trainingdata.txt",buffering=1)
nDoc = int(file.readline())
|
[
"import numpy as np\nimport pylab as pl\n\nfile = open(\"trainingdata.txt\",buffering=1)\nnDoc = int(file.readline())\n",
"import numpy as np\nimport pylab as pl\nfile = open('trainingdata.txt', buffering=1)\nnDoc = int(file.readline())\n",
"<import token>\nfile = open('trainingdata.txt', buffering=1)\nnDoc = int(file.readline())\n",
"<import token>\n<assignment token>\n"
] | false |
99,061 |
df86061ce7038be19531a1b8154e34c86d9f867d
|
ii = [('CookGHP3.py', 1), ('LyelCPG2.py', 1), ('AubePRP2.py', 13), ('PettTHE.py', 1), ('AubePRP.py', 1), ('CoolWHM.py', 3), ('BuckWGM.py', 1), ('MereHHB.py', 1), ('WilkJMC.py', 1), ('MackCNH.py', 3), ('FitzRNS.py', 1), ('MackCNH2.py', 3), ('ClarGE3.py', 6), ('DibdTRL.py', 14), ('LyelCPG3.py', 3), ('BowrJMM3.py', 1)]
|
[
"ii = [('CookGHP3.py', 1), ('LyelCPG2.py', 1), ('AubePRP2.py', 13), ('PettTHE.py', 1), ('AubePRP.py', 1), ('CoolWHM.py', 3), ('BuckWGM.py', 1), ('MereHHB.py', 1), ('WilkJMC.py', 1), ('MackCNH.py', 3), ('FitzRNS.py', 1), ('MackCNH2.py', 3), ('ClarGE3.py', 6), ('DibdTRL.py', 14), ('LyelCPG3.py', 3), ('BowrJMM3.py', 1)]",
"ii = [('CookGHP3.py', 1), ('LyelCPG2.py', 1), ('AubePRP2.py', 13), (\n 'PettTHE.py', 1), ('AubePRP.py', 1), ('CoolWHM.py', 3), ('BuckWGM.py', \n 1), ('MereHHB.py', 1), ('WilkJMC.py', 1), ('MackCNH.py', 3), (\n 'FitzRNS.py', 1), ('MackCNH2.py', 3), ('ClarGE3.py', 6), ('DibdTRL.py',\n 14), ('LyelCPG3.py', 3), ('BowrJMM3.py', 1)]\n",
"<assignment token>\n"
] | false |
99,062 |
b077542e34b80821034a279e138e7a716e21d155
|
# -*- coding: utf-8 -*-
from django.db import models
from models import MODERATION_STATUS_APPROVED
class MetaManager(type(models.Manager)):
def __new__(cls, name, bases, attrs):
return super(MetaManager, cls).__new__(cls, name, bases, attrs)
class ModeratorManagerFactory(object):
@staticmethod
def get(bases):
if not isinstance(bases, tuple):
bases = (bases,)
bases = (ModeratorManager,) + bases
return MetaManager(ModeratorManager.__name__, bases,
{'use_for_related_fields': True})
class ModeratorManager(models.Manager):
def get_queryset(self):
return super(ModeratorManager, self).get_queryset()\
.filter(moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)
def unmoderated(self):
return super(ModeratorManager, self).get_queryset()
|
[
"# -*- coding: utf-8 -*-\n\n\nfrom django.db import models\n\nfrom models import MODERATION_STATUS_APPROVED\n\n\nclass MetaManager(type(models.Manager)):\n def __new__(cls, name, bases, attrs):\n return super(MetaManager, cls).__new__(cls, name, bases, attrs)\n\n\nclass ModeratorManagerFactory(object):\n @staticmethod\n def get(bases):\n if not isinstance(bases, tuple):\n bases = (bases,)\n\n bases = (ModeratorManager,) + bases\n\n return MetaManager(ModeratorManager.__name__, bases,\n {'use_for_related_fields': True})\n\n\nclass ModeratorManager(models.Manager):\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset()\\\n .filter(moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n\n def unmoderated(self):\n return super(ModeratorManager, self).get_queryset()\n",
"from django.db import models\nfrom models import MODERATION_STATUS_APPROVED\n\n\nclass MetaManager(type(models.Manager)):\n\n def __new__(cls, name, bases, attrs):\n return super(MetaManager, cls).__new__(cls, name, bases, attrs)\n\n\nclass ModeratorManagerFactory(object):\n\n @staticmethod\n def get(bases):\n if not isinstance(bases, tuple):\n bases = bases,\n bases = (ModeratorManager,) + bases\n return MetaManager(ModeratorManager.__name__, bases, {\n 'use_for_related_fields': True})\n\n\nclass ModeratorManager(models.Manager):\n\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset().filter(\n moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n\n def unmoderated(self):\n return super(ModeratorManager, self).get_queryset()\n",
"<import token>\n\n\nclass MetaManager(type(models.Manager)):\n\n def __new__(cls, name, bases, attrs):\n return super(MetaManager, cls).__new__(cls, name, bases, attrs)\n\n\nclass ModeratorManagerFactory(object):\n\n @staticmethod\n def get(bases):\n if not isinstance(bases, tuple):\n bases = bases,\n bases = (ModeratorManager,) + bases\n return MetaManager(ModeratorManager.__name__, bases, {\n 'use_for_related_fields': True})\n\n\nclass ModeratorManager(models.Manager):\n\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset().filter(\n moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n\n def unmoderated(self):\n return super(ModeratorManager, self).get_queryset()\n",
"<import token>\n\n\nclass MetaManager(type(models.Manager)):\n <function token>\n\n\nclass ModeratorManagerFactory(object):\n\n @staticmethod\n def get(bases):\n if not isinstance(bases, tuple):\n bases = bases,\n bases = (ModeratorManager,) + bases\n return MetaManager(ModeratorManager.__name__, bases, {\n 'use_for_related_fields': True})\n\n\nclass ModeratorManager(models.Manager):\n\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset().filter(\n moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n\n def unmoderated(self):\n return super(ModeratorManager, self).get_queryset()\n",
"<import token>\n<class token>\n\n\nclass ModeratorManagerFactory(object):\n\n @staticmethod\n def get(bases):\n if not isinstance(bases, tuple):\n bases = bases,\n bases = (ModeratorManager,) + bases\n return MetaManager(ModeratorManager.__name__, bases, {\n 'use_for_related_fields': True})\n\n\nclass ModeratorManager(models.Manager):\n\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset().filter(\n moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n\n def unmoderated(self):\n return super(ModeratorManager, self).get_queryset()\n",
"<import token>\n<class token>\n\n\nclass ModeratorManagerFactory(object):\n <function token>\n\n\nclass ModeratorManager(models.Manager):\n\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset().filter(\n moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n\n def unmoderated(self):\n return super(ModeratorManager, self).get_queryset()\n",
"<import token>\n<class token>\n<class token>\n\n\nclass ModeratorManager(models.Manager):\n\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset().filter(\n moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n\n def unmoderated(self):\n return super(ModeratorManager, self).get_queryset()\n",
"<import token>\n<class token>\n<class token>\n\n\nclass ModeratorManager(models.Manager):\n\n def get_queryset(self):\n return super(ModeratorManager, self).get_queryset().filter(\n moderator_entry__moderation_status=MODERATION_STATUS_APPROVED)\n <function token>\n",
"<import token>\n<class token>\n<class token>\n\n\nclass ModeratorManager(models.Manager):\n <function token>\n <function token>\n",
"<import token>\n<class token>\n<class token>\n<class token>\n"
] | false |
99,063 |
1d9a06b028fd87332fa30c42f06e3e6dc791c504
|
from org.eclipse.swt import SWT
from org.eclipse.swt.widgets import Shell, ToolBar, ToolItem, Listener
def addItem(i, bar):
item = ToolItem(bar, SWT.PUSH)
item.setText("Item " + str(i))
class PrintListener(Listener):
def handleEvent(self, e):
print "Selected item", i
newItem = ToolItem(bar, SWT.PUSH)
newItem.setText("Extra Item " + str(i))
bar.pack()
item.addListener(SWT.Selection, PrintListener())
shell = Shell()
bar = ToolBar(shell, SWT.BORDER)
for i in range(8):
addItem(i, bar)
bar.pack()
shell.open()
display = shell.getDisplay()
while not shell.isDisposed():
if not display.readAndDispatch():
display.sleep()
display.dispose()
|
[
"from org.eclipse.swt import SWT\nfrom org.eclipse.swt.widgets import Shell, ToolBar, ToolItem, Listener\n\ndef addItem(i, bar):\n item = ToolItem(bar, SWT.PUSH)\n item.setText(\"Item \" + str(i))\n class PrintListener(Listener):\n def handleEvent(self, e):\n print \"Selected item\", i\n newItem = ToolItem(bar, SWT.PUSH)\n newItem.setText(\"Extra Item \" + str(i))\n bar.pack()\n\n item.addListener(SWT.Selection, PrintListener())\n\nshell = Shell()\nbar = ToolBar(shell, SWT.BORDER)\nfor i in range(8):\n addItem(i, bar)\n \nbar.pack()\nshell.open()\ndisplay = shell.getDisplay()\nwhile not shell.isDisposed():\n if not display.readAndDispatch():\n display.sleep()\n \ndisplay.dispose()\n"
] | true |
99,064 |
c16436863ccc38b07235f67dcf3e7ae5923def61
|
# -*- coding: utf-8 -*-
"""Lab10 Juan Jose- EoML.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1GIKbtGE9V2_m9EkY03FaIWb1OfR7-367
# Laboratorio 10
En este laboratorio encontraremos el valor óptimo de k en k-mean clustering con una gráfica de distancia cuadrada media respecto a k
"""
import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
import matplotlib.pyplot as plt
print("Todos los paquetes han sido importados:")
"""Trabajaremos sobre la base de datos de casas de Wholesales costumers, que se encuentra en [Machine Learning Repository](http://archive.ics.uci.edu/ml/datasets/Wholesale+customers).
1. Enumera qué variables son continuas y qué variables son categóricas
2. Despliega la estadística descriptiva de las variables continuas del dataset para explorarlo
3. Convierte las variables categóricas en binarias usando pd.get_dummies.
4. Estandariza las variables usando una escala estándar (StandardScaler). Explica por qué este paso es importante.
5. En un rango de 1 a 15 para k, realiza el clustering sobre los datos y para cada k guarda el valor de la distancia cuadrada media.
6. Haz una gráfica de k contra la distancia cuadrada media. ¿Qué criterio puedes usar para identificar el valor óptimo de k? ¿Qué pasa cuando k se aproxima a n, la cantidad de observaciones?
7. Repite el clustering ahora usando el valor óptimo de k. Ubica la posición de cada centroide y comenta acerca de cada uno (por ejemplo, ¿qué valores para cada variable caracterizan a cada centroide?)
#Upload File
"""
from google.colab import files
uploaded = files.upload()
import io
data= pd.read_csv(io.BytesIO(uploaded['Wholesale customers data.csv']))
"""#1Enumera las columnas continuas"""
cols = data.columns
num_cols = data._get_numeric_data().columns
list(set(cols) - set(num_cols))
"""#2 Estadística descriptiva"""
data.describe()
"""#3 Convertir variables categóricas en binarias.
No hay entonces no lo hice.
#4 Estandarizar las variables
"""
scaler = StandardScaler()
scaled_df = scaler.fit_transform(data)
#Es importante para estandarizar la data que la media=0 y la desviación=1 para aplicar ML
"""#5 k-mean cluster"""
X = data.iloc[:,3:8].values
K = 15
m=X.shape[0]
n=X.shape[1]
n_iter=100
Centroids=np.array([]).reshape(n,0)
for i in range(K):
rand=np.random.randint(0,m-1)
Centroids=np.c_[Centroids,X[rand]]
EuclidianDistance=np.array([]).reshape(m,0)
for k in range(K):
tempDist=np.sum((X-Centroids[:,k])**2,axis=1)
EuclidianDistance=np.c_[EuclidianDistance,tempDist]
C=np.argmin(EuclidianDistance,axis=1)+1
Y={}
for k in range(K):
Y[k+1]=np.array([]).reshape(2,0)
for i in range(m):
Y[C[i]]=np.c_[Y[C[i]],X[i]]
for k in range(K):
Y[k+1]=Y[k+1].T
for k in range(K):
Centroids[:,k]=np.mean(Y[k+1],axis=0)
for i in range(n_iter):
EuclidianDistance=np.array([]).reshape(m,0)
for k in range(K):
tempDist=np.sum((X-Centroids[:,k])**2,axis=1)
EuclidianDistance=np.c_[EuclidianDistance,tempDist]
C=np.argmin(EuclidianDistance,axis=1)+1
Y={}
for k in range(K):
Y[k+1]=np.array([]).reshape(2,0)
for i in range(m):
Y[C[i]]=np.c_[Y[C[i]],X[i]]
for k in range(K):
Y[k+1]=Y[k+1].T
for k in range(K):
Centroids[:,k]=np.mean(Y[k+1],axis=0)
Output=Y
"""#6 Graph"""
color=['red','blue','green','cyan','magenta']
labels=['cluster1','cluster2','cluster3','cluster4','cluster5']
for k in range(K):
plt.scatter(Output[k+1][:,0],Output[k+1][:,1],c=color[k],label=labels[k])
plt.scatter(Centroids[0,:],Centroids[1,:],s=300,c='yellow',label='Centroids')
plt.xlabel('Income')
plt.ylabel('Number of transactions')
plt.legend()
plt.show()
"""#7 Valor optimo de k"""
|
[
"# -*- coding: utf-8 -*-\n\"\"\"Lab10 Juan Jose- EoML.ipynb\n\nAutomatically generated by Colaboratory.\n\nOriginal file is located at\n https://colab.research.google.com/drive/1GIKbtGE9V2_m9EkY03FaIWb1OfR7-367\n\n# Laboratorio 10\n\nEn este laboratorio encontraremos el valor óptimo de k en k-mean clustering con una gráfica de distancia cuadrada media respecto a k\n\"\"\"\n\nimport pandas as pd\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.cluster import KMeans\n\nimport matplotlib.pyplot as plt\n\nprint(\"Todos los paquetes han sido importados:\")\n\n\"\"\"Trabajaremos sobre la base de datos de casas de Wholesales costumers, que se encuentra en [Machine Learning Repository](http://archive.ics.uci.edu/ml/datasets/Wholesale+customers).\n\n1. Enumera qué variables son continuas y qué variables son categóricas\n2. Despliega la estadística descriptiva de las variables continuas del dataset para explorarlo\n3. Convierte las variables categóricas en binarias usando pd.get_dummies.\n4. Estandariza las variables usando una escala estándar (StandardScaler). Explica por qué este paso es importante.\n5. En un rango de 1 a 15 para k, realiza el clustering sobre los datos y para cada k guarda el valor de la distancia cuadrada media.\n6. Haz una gráfica de k contra la distancia cuadrada media. ¿Qué criterio puedes usar para identificar el valor óptimo de k? ¿Qué pasa cuando k se aproxima a n, la cantidad de observaciones?\n7. Repite el clustering ahora usando el valor óptimo de k. Ubica la posición de cada centroide y comenta acerca de cada uno (por ejemplo, ¿qué valores para cada variable caracterizan a cada centroide?)\n\n#Upload File\n\"\"\"\n\nfrom google.colab import files\nuploaded = files.upload()\nimport io\ndata= pd.read_csv(io.BytesIO(uploaded['Wholesale customers data.csv']))\n\n\"\"\"#1Enumera las columnas continuas\"\"\"\n\ncols = data.columns\n\nnum_cols = data._get_numeric_data().columns\n\nlist(set(cols) - set(num_cols))\n\n\"\"\"#2 Estadística descriptiva\"\"\"\n\ndata.describe()\n\n\"\"\"#3 Convertir variables categóricas en binarias.\n\nNo hay entonces no lo hice.\n\n#4 Estandarizar las variables\n\"\"\"\n\nscaler = StandardScaler()\nscaled_df = scaler.fit_transform(data)\n\n#Es importante para estandarizar la data que la media=0 y la desviación=1 para aplicar ML\n\n\"\"\"#5 k-mean cluster\"\"\"\n\nX = data.iloc[:,3:8].values\nK = 15\nm=X.shape[0] \nn=X.shape[1] \nn_iter=100\nCentroids=np.array([]).reshape(n,0) \nfor i in range(K):\n rand=np.random.randint(0,m-1)\n Centroids=np.c_[Centroids,X[rand]]\n\nEuclidianDistance=np.array([]).reshape(m,0)\nfor k in range(K):\n tempDist=np.sum((X-Centroids[:,k])**2,axis=1)\n EuclidianDistance=np.c_[EuclidianDistance,tempDist]\nC=np.argmin(EuclidianDistance,axis=1)+1\n\nY={}\nfor k in range(K):\n Y[k+1]=np.array([]).reshape(2,0)\n \nfor i in range(m):\n Y[C[i]]=np.c_[Y[C[i]],X[i]]\n \nfor k in range(K):\n Y[k+1]=Y[k+1].T\n \nfor k in range(K):\n Centroids[:,k]=np.mean(Y[k+1],axis=0)\n\nfor i in range(n_iter):\n EuclidianDistance=np.array([]).reshape(m,0)\n for k in range(K):\n tempDist=np.sum((X-Centroids[:,k])**2,axis=1)\n EuclidianDistance=np.c_[EuclidianDistance,tempDist]\n C=np.argmin(EuclidianDistance,axis=1)+1\n Y={}\n for k in range(K):\n Y[k+1]=np.array([]).reshape(2,0)\n for i in range(m):\n Y[C[i]]=np.c_[Y[C[i]],X[i]]\n \n for k in range(K):\n Y[k+1]=Y[k+1].T\n \n for k in range(K):\n Centroids[:,k]=np.mean(Y[k+1],axis=0)\n Output=Y\n\n\"\"\"#6 Graph\"\"\"\n\ncolor=['red','blue','green','cyan','magenta']\nlabels=['cluster1','cluster2','cluster3','cluster4','cluster5']\nfor k in range(K):\n plt.scatter(Output[k+1][:,0],Output[k+1][:,1],c=color[k],label=labels[k])\nplt.scatter(Centroids[0,:],Centroids[1,:],s=300,c='yellow',label='Centroids')\nplt.xlabel('Income')\nplt.ylabel('Number of transactions')\nplt.legend()\nplt.show()\n\n\"\"\"#7 Valor optimo de k\"\"\""
] | true |
99,065 |
c714d9e762d9821a5ab4897019d72a3ddd5ccd61
|
import logging
import traceback
from RPi import GPIO
from time import sleep
from multiprocessing import Process
from constants import LOG_CONFIG, MAIN_LIGHT_PINS, CROSS_LIGHT_PINS, BOUNCE_TIME, BUTTON_PIN, DISPLAY_PINS
from rgb import RGB
from button import Button
from digit_display import DigitDisplay
logging.basicConfig(**LOG_CONFIG)
log = logging.getLogger(__name__)
def main():
try:
# GPIO.setmode(GPIO.BCM)
log.debug('main Successfully Setup')
test_green()
except KeyboardInterrupt:
log.debug('User ended the program')
except Exception as e:
var = traceback.format_exc()
log.debug(e)
log.debug(str(var))
finally:
GPIO.cleanup()
log.debug('Main Cleaned Up')
def button_tests():
def cb():
log.debug('Entered Button cb')
sleep(5)
log.debug('Exiting Button cb')
button = Button(BUTTON_PIN, BOUNCE_TIME, cb)
while True:
pass
def main_light_tests():
main_light = RGB(**MAIN_LIGHT_PINS)
log.debug('main_light Successfully Setup')
log.debug('Turning Red Light on for main_light')
main_light.red()
sleep(2)
log.debug('Turning Blue Light on for main_light')
main_light.blue()
sleep(2)
log.debug('Turning Green Light on for main_light')
main_light.green()
sleep(2)
def cross_light_tests():
cross_light = RGB(**CROSS_LIGHT_PINS)
log.debug('cross_light Successfully Setup')
log.debug('Turning Red Light on for cross_light')
cross_light.red()
sleep(2)
log.debug('Turning Blue Light on for cross_light')
cross_light.blue()
sleep(2)
log.debug('Turning Green Light on for cross_light')
cross_light.green()
sleep(2)
def multi_threading():
import threading
def test():
for i in range(5):
log.debug(i)
sleep(1)
threading.Thread(target=test)
threading.Thread(target=test)
def display_tests():
GPIO.setmode(GPIO.BCM)
d = DigitDisplay(DISPLAY_PINS)
for x in range(10):
d.display(x)
sleep(1)
while True:
pass
def test_green():
GPIO.setmode(GPIO.BCM)
cross_light = RGB(**CROSS_LIGHT_PINS)
cross_light.green()
while True:
pass
if __name__ == '__main__':
main()
|
[
"import logging\nimport traceback\nfrom RPi import GPIO\nfrom time import sleep\nfrom multiprocessing import Process\n\nfrom constants import LOG_CONFIG, MAIN_LIGHT_PINS, CROSS_LIGHT_PINS, BOUNCE_TIME, BUTTON_PIN, DISPLAY_PINS\nfrom rgb import RGB\nfrom button import Button\nfrom digit_display import DigitDisplay\n\nlogging.basicConfig(**LOG_CONFIG)\nlog = logging.getLogger(__name__)\n\ndef main():\n try:\n # GPIO.setmode(GPIO.BCM)\n\n log.debug('main Successfully Setup')\n test_green()\n\n except KeyboardInterrupt:\n log.debug('User ended the program')\n\n except Exception as e:\n var = traceback.format_exc()\n log.debug(e)\n log.debug(str(var))\n\n finally:\n GPIO.cleanup()\n log.debug('Main Cleaned Up')\n\ndef button_tests():\n def cb():\n log.debug('Entered Button cb')\n sleep(5)\n log.debug('Exiting Button cb')\n button = Button(BUTTON_PIN, BOUNCE_TIME, cb)\n\n while True:\n pass\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\ndef multi_threading():\n import threading\n def test():\n for i in range(5):\n log.debug(i)\n sleep(1)\n\n threading.Thread(target=test)\n threading.Thread(target=test)\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n\n for x in range(10):\n d.display(x)\n sleep(1)\n\n while True:\n pass\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n\nif __name__ == '__main__':\n main()\n",
"import logging\nimport traceback\nfrom RPi import GPIO\nfrom time import sleep\nfrom multiprocessing import Process\nfrom constants import LOG_CONFIG, MAIN_LIGHT_PINS, CROSS_LIGHT_PINS, BOUNCE_TIME, BUTTON_PIN, DISPLAY_PINS\nfrom rgb import RGB\nfrom button import Button\nfrom digit_display import DigitDisplay\nlogging.basicConfig(**LOG_CONFIG)\nlog = logging.getLogger(__name__)\n\n\ndef main():\n try:\n log.debug('main Successfully Setup')\n test_green()\n except KeyboardInterrupt:\n log.debug('User ended the program')\n except Exception as e:\n var = traceback.format_exc()\n log.debug(e)\n log.debug(str(var))\n finally:\n GPIO.cleanup()\n log.debug('Main Cleaned Up')\n\n\ndef button_tests():\n\n def cb():\n log.debug('Entered Button cb')\n sleep(5)\n log.debug('Exiting Button cb')\n button = Button(BUTTON_PIN, BOUNCE_TIME, cb)\n while True:\n pass\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\ndef multi_threading():\n import threading\n\n def test():\n for i in range(5):\n log.debug(i)\n sleep(1)\n threading.Thread(target=test)\n threading.Thread(target=test)\n\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n for x in range(10):\n d.display(x)\n sleep(1)\n while True:\n pass\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\nlogging.basicConfig(**LOG_CONFIG)\nlog = logging.getLogger(__name__)\n\n\ndef main():\n try:\n log.debug('main Successfully Setup')\n test_green()\n except KeyboardInterrupt:\n log.debug('User ended the program')\n except Exception as e:\n var = traceback.format_exc()\n log.debug(e)\n log.debug(str(var))\n finally:\n GPIO.cleanup()\n log.debug('Main Cleaned Up')\n\n\ndef button_tests():\n\n def cb():\n log.debug('Entered Button cb')\n sleep(5)\n log.debug('Exiting Button cb')\n button = Button(BUTTON_PIN, BOUNCE_TIME, cb)\n while True:\n pass\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\ndef multi_threading():\n import threading\n\n def test():\n for i in range(5):\n log.debug(i)\n sleep(1)\n threading.Thread(target=test)\n threading.Thread(target=test)\n\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n for x in range(10):\n d.display(x)\n sleep(1)\n while True:\n pass\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\nlogging.basicConfig(**LOG_CONFIG)\n<assignment token>\n\n\ndef main():\n try:\n log.debug('main Successfully Setup')\n test_green()\n except KeyboardInterrupt:\n log.debug('User ended the program')\n except Exception as e:\n var = traceback.format_exc()\n log.debug(e)\n log.debug(str(var))\n finally:\n GPIO.cleanup()\n log.debug('Main Cleaned Up')\n\n\ndef button_tests():\n\n def cb():\n log.debug('Entered Button cb')\n sleep(5)\n log.debug('Exiting Button cb')\n button = Button(BUTTON_PIN, BOUNCE_TIME, cb)\n while True:\n pass\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\ndef multi_threading():\n import threading\n\n def test():\n for i in range(5):\n log.debug(i)\n sleep(1)\n threading.Thread(target=test)\n threading.Thread(target=test)\n\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n for x in range(10):\n d.display(x)\n sleep(1)\n while True:\n pass\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\nif __name__ == '__main__':\n main()\n",
"<import token>\n<code token>\n<assignment token>\n\n\ndef main():\n try:\n log.debug('main Successfully Setup')\n test_green()\n except KeyboardInterrupt:\n log.debug('User ended the program')\n except Exception as e:\n var = traceback.format_exc()\n log.debug(e)\n log.debug(str(var))\n finally:\n GPIO.cleanup()\n log.debug('Main Cleaned Up')\n\n\ndef button_tests():\n\n def cb():\n log.debug('Entered Button cb')\n sleep(5)\n log.debug('Exiting Button cb')\n button = Button(BUTTON_PIN, BOUNCE_TIME, cb)\n while True:\n pass\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\ndef multi_threading():\n import threading\n\n def test():\n for i in range(5):\n log.debug(i)\n sleep(1)\n threading.Thread(target=test)\n threading.Thread(target=test)\n\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n for x in range(10):\n d.display(x)\n sleep(1)\n while True:\n pass\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n\n\ndef main():\n try:\n log.debug('main Successfully Setup')\n test_green()\n except KeyboardInterrupt:\n log.debug('User ended the program')\n except Exception as e:\n var = traceback.format_exc()\n log.debug(e)\n log.debug(str(var))\n finally:\n GPIO.cleanup()\n log.debug('Main Cleaned Up')\n\n\n<function token>\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\ndef multi_threading():\n import threading\n\n def test():\n for i in range(5):\n log.debug(i)\n sleep(1)\n threading.Thread(target=test)\n threading.Thread(target=test)\n\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n for x in range(10):\n d.display(x)\n sleep(1)\n while True:\n pass\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n\n\ndef main():\n try:\n log.debug('main Successfully Setup')\n test_green()\n except KeyboardInterrupt:\n log.debug('User ended the program')\n except Exception as e:\n var = traceback.format_exc()\n log.debug(e)\n log.debug(str(var))\n finally:\n GPIO.cleanup()\n log.debug('Main Cleaned Up')\n\n\n<function token>\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\n<function token>\n\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n for x in range(10):\n d.display(x)\n sleep(1)\n while True:\n pass\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\n<function token>\n\n\ndef display_tests():\n GPIO.setmode(GPIO.BCM)\n d = DigitDisplay(DISPLAY_PINS)\n for x in range(10):\n d.display(x)\n sleep(1)\n while True:\n pass\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef main_light_tests():\n main_light = RGB(**MAIN_LIGHT_PINS)\n log.debug('main_light Successfully Setup')\n log.debug('Turning Red Light on for main_light')\n main_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for main_light')\n main_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for main_light')\n main_light.green()\n sleep(2)\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\n<function token>\n<function token>\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n\n\ndef cross_light_tests():\n cross_light = RGB(**CROSS_LIGHT_PINS)\n log.debug('cross_light Successfully Setup')\n log.debug('Turning Red Light on for cross_light')\n cross_light.red()\n sleep(2)\n log.debug('Turning Blue Light on for cross_light')\n cross_light.blue()\n sleep(2)\n log.debug('Turning Green Light on for cross_light')\n cross_light.green()\n sleep(2)\n\n\n<function token>\n<function token>\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef test_green():\n GPIO.setmode(GPIO.BCM)\n cross_light = RGB(**CROSS_LIGHT_PINS)\n cross_light.green()\n while True:\n pass\n\n\n<code token>\n",
"<import token>\n<code token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<code token>\n"
] | false |
99,066 |
3dff43c6018a930139687f092a00145ffc478ef7
|
# Generate female voice syllablde using Google TTS API
# Go to home directory that contains env folder and run source env/bin/activate
# Then run the Python script with python EyeSound_generate.py
# from gtts import gTTS
from gtts import *
# syllables = ["fa", "to", "se", "pu", "ti", "mi", "ro", "pe", "fa", "lu", "to", "la", "si", "su", "fe", "le", "si", "so", "la", "mu", "fe", "ga", "fo", "fe", "fu", "pi", "li", "mo", "re", "sa", "su", "po", "fa", "fi", "gu", "le", "me", "pi", "fo", "ta", "tu", "pe", "la", "ro", "re", "ru", "fi", "ri", "po", "le", "ga", "fu", "go", "ra", "gui", "ru", "pe", "se", "li", "mo", "pa", "pu", "me", "sa", "po", "ge", "tu", "gui", "fi", "to", "fe", "pa", "pu", "ro", "pa", "ri", "pu", "ge", "pe", "ri", "to", "ra", "ru", "te", "ma", "go", "me", "lu", "li", "ti", "lo", "me", "ra", "mu", "so", "ga", "pi", "tu", "re", "re", "mi", "po", "ga", "gu", "se", "ra", "mo", "te", "su", "mi", "si", "so", "ge", "ta", "ru", "fo", "ta", "ti", "mu", "se", "fe", "ti", "go", "ma", "lu", "ge", "pa", "lo", "pe", "gu", "ri", "pi", "go", "te", "la", "tu", "mo", "ma", "li", "fu", "me", "ge", "fi", "lo", "fa", "fu", "re", "ta", "so", "le", "mu", "si", "gui", "fo", "se", "ma", "gu", "lo", "sa", "mi", "lu", "te", "te", "gui", "ro", "sa", "su", "le", "ru", "ri", "la", "me", "to", "lu", "te", "mi", "ga", "to", "gui", "lo", "me", "ga", "tu", "re", "lo", "ma", "pi", "gu", "lo", "fu", "ti", "fa", "se", "po", "ru", "re", "si", "ta", "po", "pi", "mo", "le", "sa", "lu", "fe", "mo", "fa", "li", "tu", "ro", "mu", "mi", "pa", "ge", "go", "gu", "ge", "li", "fa", "lo", "ti", "so", "te", "pa", "mu", "ge", "to", "ta", "ti", "ru", "so", "tu", "li", "ga", "re", "lo", "tu", "le", "ri", "sa", "ro", "mi", "po", "se", "ra", "fu", "te", "fo", "ra", "mi", "mu", "fo", "su", "gui", "ra", "le", "ro", "su", "fe", "pi", "ma", "mo", "si", "ro", "ge", "ma", "gu", "se", "go", "sa", "ri", "pu", "go", "gu", "pi", "ma", "pe", "fo", "fu", "se", "gui", "ra", "so", "ri", "to", "re", "la", "su", "le", "po", "ga", "fi", "fu", "mo", "lu", "si", "ta", "fe", "mo", "pu", "me", "ti", "pa", "go", "li", "fo", "pe", "fa", "ru", "me", "so", "pa", "si", "lu", "to", "pu", "fi", "sa", "te", "so", "mu", "pe", "fi", "la", "fo", "fi", "go", "fe", "ta", "pu", "pe", "ro", "la", "gui", "su", "po", "ro", "gu", "ma", "me", "mi", "ga", "fi", "go", "pu", "le", "fa", "mu", "fe", "mo", "ti", "te", "to", "li", "pa", "gu", "se", "to", "lu", "ga", "ge", "ri", "la", "gui", "mo", "mu", "te", "ga", "fu", "te", "ro", "mi", "ge", "fo", "pi", "sa", "lu", "le", "fo", "su", "sa", "le", "fi", "pa", "li", "fo", "tu", "pe", "pa", "ru", "ge", "go", "si", "me", "ro", "ti", "ma", "mu", "me", "lo", "pu", "fa", "se", "ti", "ma", "ri", "so", "ru", "fe", "ma", "lu", "se", "to", "li", "fe", "go", "ri", "la", "pu", "fe", "mo", "mu", "ta", "pe", "pi", "sa", "mi", "to", "su", "re", "ta", "gu", "le", "po", "pi", "se", "mo", "si", "fa", "fu", "re", "po", "tu", "ra", "re", "gui", "ta", "si", "po", "fu", "ge", "la", "tu", "re", "lo", "ri", "le", "lo", "mi", "ta", "su", "ge", "go", "fu", "pa", "te", "li", "ra", "ti", "lo", "gu", "se", "sa", "su", "pe", "so", "gui", "pe", "po", "gui", "ga", "ru", "pe", "so", "ru", "la", "fe", "si", "fa", "pi", "ro", "lu", "me", "ra", "pu", "me", "fo", "fi", "re", "so", "fi", "ra", "tu", "te", "fo", "sa", "li", "mu", "fe", "ri", "sa", "me", "fo", "mu", "sa", "gui", "po", "le", "su", "me", "fi", "ta", "so", "pu", "ra", "go", "ga", "mi", "tu", "pe", "pi", "ga", "le", "po", "ru", "fa", "ri", "mo", "se", "fu", "pe", "si", "la", "ro", "tu", "ma", "lo", "la", "pi", "gu", "re", "fi", "la", "re", "so", "su", "la", "ti", "fo", "me", "gu", "se", "ri", "pa", "go", "mu", "fa", "to", "fa", "ti", "pu", "me", "li", "fa", "fe", "to", "tu", "ma", "mi", "go", "pe", "mu", "ge", "gui", "ga", "po", "gu", "ga", "po", "pa", "gui", "lu", "le", "gui", "ma", "ge", "ro", "gu", "ga", "li", "ro", "ge", "pu", "te", "li", "fa", "to", "lu", "sa", "ro", "ta", "ri", "su", "te", "si", "ta", "pe", "go", "pu", "pa", "pi", "lo", "te", "ru", "fe", "pi", "sa", "fo", "fu", "pa", "so", "ra", "si", "fu", "se", "mi", "pa", "te", "mo", "fu", "ta", "si", "to", "re", "tu", "le", "mi", "ma", "mo", "su", "ta", "mo", "ma", "fi", "ru", "ge", "ti", "ra", "se", "lo", "lu", "ra", "fi", "so", "fe", "lu", "re", "ti", "ra", "lo", "ru", "la", "tu", "fa", "se", "ro", "ti", "li", "ge", "pa", "tu", "fo", "gu", "mo", "gui", "se", "ta", "me", "ra", "po", "pu", "li", "gui", "lu", "la", "fe", "go", "gui", "gui", "pe", "ma", "lu", "go", "tu", "go", "pi", "le", "ra", "re", "ta", "to", "tu", "ri", "si", "mu", "ra", "le", "to", "mi", "ti", "le", "fa", "gu", "so", "pu", "ro", "li", "re", "la", "ge", "fa", "so", "lu", "ti", "pi", "fu", "pa", "pe", "so", "pi", "ri", "se", "ga", "pu", "to", "ru", "so", "mi", "fe", "ma", "le", "ga", "fo", "ru", "fi", "ri", "gu", "ma", "me", "fo", "fi", "si", "re", "la", "mu", "po", "lu", "po", "ti", "me", "pa", "fe", "pa", "lo", "su", "gui", "fi", "pu", "ga", "te", "mo", "si", "fi", "te", "sa", "fu", "ro", "su", "lo", "fi", "te", "sa", "pe", "ma", "ro", "gu", "si", "ti", "su", "sa", "ge", "lo", "ri", "mi", "fe", "ra", "ru", "mo", "mu", "to", "ri", "ge", "fa", "te", "sa", "mo", "fu", "pi", "mi", "ru", "ta", "re", "po", "li", "pi", "me", "ta", "su", "lo", "fu", "fo", "si", "pe", "ga", "se", "la", "go", "mu", "mi", "li", "ra", "ri", "le", "lu", "go", "ri", "te", "to", "pa", "pu", "ma", "ti", "ru", "pe", "go", "su", "si", "te", "ra", "po", "ga", "ma", "gui", "fe", "tu", "mo", "fi", "re", "mo", "ma", "lu", "la", "mi", "fu", "re", "to", "ru", "ri", "me", "sa", "so", "fa", "pa", "li", "re", "pu", "to", "li", "pe", "go", "ga", "su", "sa", "ri", "pu", "me", "so", "lu", "pi", "ge", "ma", "lo", "ra", "fa", "fi", "se", "fu", "ro", "pi", "me", "po", "ta", "fu", "ga", "pi", "su", "te", "mo", "gu", "mi", "pe", "pa", "mo", "ma", "ta", "si", "te", "ru", "so", "gui", "se", "so", "la", "mu", "fa", "li", "lu", "se", "lo", "fu", "li", "le", "la", "ro", "sa", "ga", "ti", "pe", "mu", "fo", "mi", "ge", "lo", "sa", "ru", "pa", "gui", "gu", "ge", "ro", "tu", "gui", "se", "ga", "to", "pa", "la", "pi", "me", "su", "lo", "si", "fe", "ro", "ra", "tu", "ta", "fi", "tu", "fe", "fo", "mu", "ti", "re", "fa", "fo", "ta", "sa", "mi", "ge", "gu", "po", "ti", "le", "fo", "fa", "gu", "ra", "si", "mu", "le", "po", "pu", "fi", "fe", "ta", "go", "la", "po", "te", "mu", "pi", "ma", "su", "ga", "ge", "mi", "so", "gu", "li", "re", "pa", "to", "mu", "mo", "se", "sa", "li", "me", "mo", "le", "gu", "li", "fa", "pu", "la", "re", "li", "lo", "ru", "mi", "te", "ma", "go", "ru", "so", "le", "ga", "fi", "pe", "lo", "ge", "tu", "mi", "ta", "lu", "ra", "me", "fi", "mo", "tu", "fi", "ge", "sa", "po", "gu", "ro", "re", "la", "si", "re", "to", "pe", "ru", "ti", "ra", "fu", "ta", "le", "ri", "po", "lu", "ti", "me", "fa", "lo", "fu", "lo", "pe", "ra", "gui", "ge", "so", "se", "fu", "ri", "la", "gu", "ma", "fe", "pi", "to", "fu", "gui", "fe", "ga", "so", "tu", "po", "ge", "fa", "mi", "se", "ro", "fe", "su", "si", "pa", "ru", "fa", "pe", "gui", "go", "mu", "ri", "le", "ra", "mo", "pu", "to", "me", "ma", "pi", "te", "fo", "me", "pu", "gui", "sa", "mu", "sa", "se", "si", "ro", "su", "si", "pe", "ta", "fo", "su", "fo", "te", "pa", "ri", "fe", "go", "re", "lu", "fi", "ga", "tu", "pa", "te", "ti", "fo", "pu", "pi", "se", "la", "ro", "lu", "go", "fe", "ta", "ti", "le", "fa", "so", "gui", "su", "ge", "po", "pa", "li", "te", "lu", "fa", "ru", "se", "gui", "to", "mo", "ga", "pi", "ru", "fe", "su", "ra", "go", "pi", "gu", "te", "go", "ma", "pi", "le", "mu", "ta", "gu", "ge", "ti", "so", "go", "sa", "ri", "mu", "ge", "tu", "la", "to", "fi", "lu", "le", "to", "sa", "ri", "se", "gu", "ga", "su", "fe", "mi", "ro", "to", "ma", "mi", "tu", "pe", "gu", "pa", "po", "mi", "mu", "me", "mo", "ta", "gui", "re", "tu", "sa", "mu", "te", "li", "fo", "ro", "ta", "si", "pu", "le", "pu", "sa", "fo", "ti", "pu", "pe", "fo", "la", "ti", "ge", "su", "ma", "lu", "le", "pi", "lo", "fo", "la", "fi", "su", "te", "fu", "ma", "lo", "si", "tu", "se", "lo", "ra", "fi", "me", "ru", "pa", "fu", "me", "fi", "mo", "po", "fa", "gui", "lu", "se", "ru", "ga", "mo", "ri", "ru", "re", "so", "fa", "si", "pe", "fu", "ra", "tu", "pe", "ri", "po", "lo", "ra", "ti", "fu", "re", "lu", "ta", "ro", "li", "fu", "fe", "ro", "ga", "mi", "fe", "pu", "la", "pu", "re", "si", "go", "so", "pa", "li", "gu", "me", "mu", "mi", "ga", "te", "ru", "lo", "fe", "li", "go", "gu", "la", "te", "ga", "ro", "gu", "gui", "mo", "fa", "me", "pi", "lu", "fe", "ri", "fa", "le", "tu", "fo", "se", "fi", "fo", "pu", "ra", "re", "la", "go", "pu", "ti", "to", "ma", "pe", "si", "pu", "me", "gui", "ma", "re", "gu", "po", "le", "pi", "mo", "mu", "ga", "ge", "ma", "to", "su", "si", "so", "ga", "fe", "mi", "mu", "se", "ti", "ta", "me", "su", "to", "pe", "gui", "lo", "lu", "ta", "fe", "fa", "po", "fu", "pi", "ro", "ta", "le", "li", "fu", "re", "si", "ra", "se", "fu", "ro", "te", "mi", "so", "ru", "ma", "se", "pa", "mo", "tu", "ri", "po", "pa", "te", "fi", "gu", "te", "li", "sa", "ge", "mu", "mo", "ge", "ti", "ro", "su", "pa", "pe", "ra", "so", "ru", "fi", "go", "ra", "ge", "ri", "tu", "le", "pi", "pa", "pe", "pu", "go", "re", "si", "po", "fu", "fa", "le", "ta", "fo", "lu", "li", "fo", "sa", "re", "ti", "su", "ge", "fi", "la", "fe", "lu", "so", "me", "ri", "to", "tu", "sa", "me", "sa", "lo", "mu", "mi", "lo", "la", "se", "gui", "ru", "pe", "ga", "li", "le", "fo", "su", "se", "ro", "ri", "gu", "pa", "fi", "le", "fa", "po", "mu", "se", "mi", "gu", "sa", "go", "go", "pa", "si", "fe", "po", "lu", "ge", "fo", "mi", "fu", "ra", "ri", "re", "ta", "go", "gu", "te", "fi", "mu", "la", "to", "po", "la", "ri", "pe", "ro", "ru", "pe", "go", "fi", "pu", "ma", "pi", "pe", "ma", "mo", "ru", "le", "gui", "ru", "ra", "lo", "so", "ra", "mi", "re", "so", "fu", "fe", "to", "ti", "tu", "fa", "li", "ge", "la", "fo", "lu", "fe", "li", "su", "ga", "po", "lo", "ta", "pi", "ge", "to", "gu", "me", "lo", "gui", "lu", "sa", "gui", "te", "pa", "ro", "tu", "pe", "si", "pu", "pa", "so", "fo", "sa", "ti", "te", "mo", "mu", "le", "po", "si", "mu", "ta", "mi", "me", "ra", "so", "fu", "ge", "ri", "fu", "ta", "ro", "mo", "ma", "gui", "se", "lo", "tu", "te", "so", "li", "su", "ga", "ti", "se", "sa", "lo", "pu", "me", "pi", "tu", "fa", "mo", "to", "fa", "fi", "me", "go", "pu", "re", "mo", "pi", "ru", "la", "si", "fe", "ga", "to", "su", "re", "ti", "lu", "ma", "fo", "ro", "pe", "pi", "lo", "ga", "tu", "ga", "ri", "le", "ro", "pu", "gu", "re", "so", "ga", "si", "ta", "tu", "fo", "ge", "mi", "pi", "me", "ri", "go", "pa", "gu", "ma", "gui", "re", "so", "gu", "ru", "fe", "go", "ta", "ti", "ma", "gu", "ro", "fe", "fi", "fi", "fe", "mi", "fo", "ra", "pu", "ra", "fi", "me", "fo", "fu", "tu", "me", "lo", "sa", "pi", "ga", "pu", "go", "re", "si", "mi", "te", "si", "so", "ta", "su", "fa", "pi", "te", "lo", "mu", "fu", "le", "to", "pa", "mi", "pa", "mu", "lo", "te", "gui", "ri", "ge", "ti", "mo", "fa", "ru", "pa", "ti", "pe", "po", "su", "su", "pe", "fo", "ma", "fi", "sa", "fu", "po", "pe", "ri", "gui", "le", "fi", "po", "sa", "mu", "la", "li", "se", "go", "tu", "lu", "se", "po", "ra", "ri", "la", "lu", "mo", "le", "ti", "li", "re", "gui", "ro", "ma", "fu", "ta", "si", "ge", "to", "lu", "mu", "te", "ro", "fa", "gui", "fa", "su", "so", "se", "pi", "si", "se", "li", "to", "la", "lu", "sa", "mi", "fe", "mo", "ru", "pu", "ge", "mo", "la", "li", "ra", "ru", "to", "me", "li", "ti", "la", "ti", "lu", "se", "lo", "mo", "fa", "re", "fu", "si", "tu", "la", "fe", "lo", "fi", "lu", "gui", "re", "mo", "pa", "ro", "ra", "pi", "tu", "re", "ro", "ro", "ma", "te", "mu", "gui", "gu", "ra", "me", "po", "gui", "gu", "fi", "ge", "lo", "ga", "mo", "ta", "fi", "gu", "me", "go", "so", "sa", "ge", "su", "li", "su", "ga", "ge", "so", "ti", "su", "li", "se", "so", "ra", "po", "sa", "ri", "mu", "fe", "to", "po", "pa", "le", "gu", "ri", "ru", "fa", "re", "mo", "mi", "mu", "pi", "pe", "fo", "ma", "to", "fa", "si", "pu", "ge", "fo", "go", "ra", "fe", "lu", "ti", "mu", "ta", "pe", "ro", "pi", "ru", "mi", "fe", "ro", "fa", "fo", "pa", "mi", "ru", "le", "po", "to", "ta", "se", "ru", "pi", "lu", "pa", "le", "to", "li", "pu", "si", "me", "to", "sa", "go", "ma", "gui", "fu", "te", "so", "fo", "la", "me", "tu", "fi", "fu", "sa", "te", "go", "si", "fu", "ri", "le", "go", "ta", "lo", "ga", "li", "su", "pe", "mo", "lo", "ga", "pe", "pu", "mi", "pu", "ma", "se", "fo", "ri", "tu", "ti", "te", "po", "la", "so", "ta", "re", "ro", "pi", "ru", "gui", "fu", "se", "ga", "mo", "fi", "fa", "po", "se", "gu", "re", "ma", "pi", "to", "pu", "mo", "ga", "le", "go", "si", "lu", "ri", "su", "te", "ta", "fo", "ti", "ra", "mo", "pe", "pu", "te", "pa", "si", "fo", "lu", "fo", "la", "se", "po", "gui", "pu", "li", "gu", "re", "ma", "to", "li", "ma", "so", "me", "lu", "me", "ga", "gui", "po", "su", "go", "pa", "pe", "mo", "fi", "tu", "pi", "ru", "me", "pa", "po", "si", "ta", "ro", "ge", "fu", "pe", "sa", "fi", "so", "ru", "ro", "ra", "fe", "to", "mi", "su", "mi", "lu", "le", "la", "so", "gui", "pa", "go", "le", "ru", "le", "ta", "ti", "lo", "fu", "po", "sa", "ge", "lo", "li", "mu", "si", "pu", "fe", "ra", "go", "mi", "ga", "lo", "fe", "su", "ge", "ra", "li", "mo", "tu", "to", "fa", "te", "fo", "ti", "fu", "ti", "tu", "ge", "fa", "lo", "pi", "la", "fo", "te", "tu", "se", "fa", "ri", "go", "mu", "so", "ma", "me", "so", "ri", "gu", "fi", "mu", "pe", "sa", "ro", "ri", "sa", "to", "re", "mu", "fe", "la", "mi", "ro", "gu", "lo", "sa", "ge", "lu", "ri", "go", "gu", "ri", "go", "ra", "le", "lu", "ri", "re", "so", "ga", "me", "su", "gui", "fa", "to", "so", "ma", "re", "ru", "si", "lo", "fu", "si", "lo", "pa", "re", "fu", "gui", "fe", "mo", "ta", "ge", "gu", "si", "sa", "lo", "fo", "la", "pe", "gu", "mi", "mo", "tu", "gui", "mo", "sa", "me", "ru", "pi", "pe", "po", "sa", "re", "ru", "ri", "pa", "mo", "to", "ra", "fe", "fu", "ti", "ro", "pu", "mi", "so", "la", "pe", "pu", "mi", "ge", "fo", "fa", "le", "lu", "fi", "ra", "so", "ro", "fa", "le", "tu", "fi", "to", "ru", "ti", "to", "ma", "ge", "gu", "fi", "te", "lo", "la", "fe", "fu", "ti", "ga", "fo", "mo", "ta", "te", "mu", "gui", "fo", "mu", "fi", "fo", "fa", "fe", "mu", "ti", "le", "go", "ra", "te", "tu", "li", "ta", "ro", "go", "pa", "se", "su", "li", "so", "lu", "pi", "ro", "ta", "te", "su", "si", "se", "to", "ma", "se", "pu", "mi", "la", "po", "lo", "ga", "me", "pu", "pi", "po", "su", "li", "po", "ga", "se", "tu", "li", "me", "ro", "pa", "pe", "mu", "pi", "ma", "go", "po", "fu", "ta", "mi", "le", "po", "ge", "ma", "gu", "li", "to", "ge", "po", "li", "ga", "gu", "ru", "fe", "lo", "sa", "li", "pe", "tu", "ga", "pi", "se", "so", "pe", "ra", "fu", "ri", "fo", "me", "fo", "pi", "la", "pu", "gu", "se", "ro", "ra", "fi", "se", "gu", "la", "fi", "te", "go", "te", "fa", "pu", "si", "go", "re", "mo", "ti", "ra", "lu", "pu", "le", "po", "ga", "ri", "te", "pu", "sa", "gui", "ge", "lo", "le", "la", "su", "gui", "mo", "te", "lo", "gui", "sa", "tu", "mu", "te", "fo", "fa", "ti", "re", "su", "fa", "si", "re", "mo", "me", "ta", "tu", "pi", "so", "le", "ro", "mi", "ma", "mu", "tu", "me", "mo", "la", "mi", "fe", "mu", "pa", "ti", "pe", "fo", "fe", "sa", "ru", "mi", "lo", "se", "to", "fi", "fa", "fu", "fu", "re", "so", "ta", "si", "le", "lu", "ra", "li", "me", "ro", "se", "ga", "lu", "fi", "ro", "fe", "so", "si", "ta", "ru", "lu", "pe", "to", "ma", "pi", "ge", "ru", "ma", "ri", "fe", "to", "re", "pa", "mu", "ti", "po", "pe", "go", "ri", "pa", "su", "su", "ge", "go", "pa", "gui", "me", "ge", "ru", "ta", "ti", "mo", "su", "li", "me", "lo", "pa", "ge", "po", "mi", "pa", "fu", "ta", "te", "ti", "su", "mo", "po", "te", "su", "fa", "si", "lo", "fu", "pi", "ge", "mo", "fa", "pe", "go", "li", "ra", "su", "ma", "fe", "li", "mu", "to", "go", "fe", "fu", "ma", "ri", "to", "mu", "ti", "le", "po", "sa", "me", "so", "ri", "ga", "pu", "la", "ge", "mi", "gu", "lo", "fo", "re", "gu", "ra", "fi", "po", "gu", "gui", "re", "ro", "ta", "re", "to", "fi", "la", "tu", "sa", "me", "pi", "tu", "po", "to", "me", "tu", "pa", "pi", "so", "ru", "si", "te", "fo", "ma", "se", "mo", "pi", "sa", "ru", "fa", "re", "si", "ru", "fo", "ro", "se", "mu", "ga", "mi", "fo", "lu", "fi", "pe", "go", "la", "te", "lo", "gui", "fa", "mu", "ra", "se", "gui", "fu", "so", "so", "le", "pu", "sa", "gui", "go", "tu", "ri", "fe", "so", "ga", "fe", "fo", "si", "ma", "lu", "pa", "le", "fi", "lu", "go", "mo", "pe", "lu", "la", "li", "ro", "pu", "mi", "se", "to", "ra", "le", "ro", "ti", "ta", "gu", "ga", "pe", "ri", "pu", "ro", "lo", "lo", "gui", "la", "me", "gu", "se", "ri", "to", "ma", "tu", "mo", "pe", "ri", "fa", "lu", "sa", "ri", "se", "fu", "po", "ri", "ro", "si", "ra", "fe", "mu", "le", "ti", "lo", "pa", "gu", "lo", "se", "ti", "sa", "tu", "la", "gui", "ge", "su", "ro", "ti", "fo", "ri", "ga", "le", "lu", "ge", "gui", "go", "sa", "fu", "fo", "re", "fi", "ma", "ru", "ga", "ti", "re", "mu", "mo", "pi", "mo", "fi", "pa", "ge", "pu", "re", "li", "ro", "ra", "su", "to", "ge", "pi", "ga", "su", "pa", "pi", "fe", "ru", "lo", "li", "go", "pi", "ta", "pe", "su", "pe", "fi", "fo", "fa", "ru", "ro", "fe", "li", "ta", "pu", "ta", "mi", "te", "tu", "so", "fi", "to", "mi", "sa", "re", "ru", "te", "si", "mo", "ta", "pu", "po", "le", "si", "ra", "fu", "ra", "si", "pe", "gu", "to", "gui", "po", "li", "fa", "te", "tu", "fe", "pi", "so", "ga", "lu", "go", "me", "gui", "la", "gu", "ma", "fi", "me", "pu", "fo", "si", "so", "ti", "ma", "se", "fu", "me", "mi", "po", "la", "mu", "so", "te", "mi", "pa", "mu", "fa", "li", "le", "lu", "go", "mi", "me", "pa", "pu", "mi", "mo", "mo", "mu", "ma", "ge", "si", "su", "so", "ra", "mi", "ge", "fo", "fe", "ta", "ti", "fu", "ro", "re", "ra", "lu", "fi", "lo", "so", "fu", "ga", "se", "ri", "mu", "to", "ma", "si", "te", "ro", "le", "ra", "ri", "tu", "so", "ge", "ma", "su", "gui", "to", "to", "ru", "pa", "le", "ti", "pu", "fo", "la", "fi", "pe", "so", "re", "sa", "si", "pu", "fo", "se", "ta", "tu", "ri", "go", "lo", "pu", "la", "me", "gui", "fu", "go", "ta", "li", "se", "go", "pe", "pa", "gui", "mu", "po", "le", "fa", "ru", "li", "fo", "ro", "lu", "fa", "pe", "mi", "lu", "mo", "fa", "pi", "me", "to", "ge", "fa", "mi", "ru", "go", "te", "la", "fu", "si", "so", "fo", "tu", "sa", "te", "pi", "gu", "lo", "sa", "gui", "re", "lo", "se", "la", "pi", "gu", "mo", "fe", "ga", "gu", "ti", "po", "po", "gu", "ta", "re", "li", "tu", "po", "pa", "ri", "fe", "po", "me", "ga", "fi", "su", "to", "pe", "sa", "mu", "pi", "ro", "go", "su", "ra", "fe", "fi", "ru", "ro", "ga", "ti", "le", "mo", "te", "ma", "li", "lu", "lo", "lu", "ro", "ti", "sa", "fe", "me", "ta", "su", "so", "li", "fo", "mu", "ga", "gui", "se", "me", "fi", "gu", "ro", "la", "si", "su", "fo", "pi", "ta", "ge", "fe", "sa", "pu", "go", "pi", "mo", "lu", "fa", "li", "te", "re", "si", "su", "so", "ra", "gui", "ru", "so", "ri", "ma", "te", "ge", "la", "fu", "po", "ri", "to", "fu", "ta", "ri", "re", "te", "gui", "tu", "mo", "ta", "mi", "mu", "po", "fi", "la", "le", "te", "ma", "gu", "to", "si", "po", "tu", "pa", "ti", "le", "le", "ti", "ru", "po", "sa", "li", "pu", "go", "mi", "pa", "me", "pe", "fa", "ru", "mo", "fi", "go", "ru", "ra", "si", "fe", "ge", "mi", "lu", "lo", "ga", "ri", "fu", "to", "li", "ra", "pe", "re", "pa", "tu", "ro", "mi", "so", "su", "la", "pi", "ge", "se", "pi", "mu", "fo", "pa", "pi", "tu", "lo", "gui", "fa", "re", "le", "ra", "lu", "lo", "gui", "lo", "pu", "ma", "mi", "pe", "fe", "ri", "pu", "to", "fa", "ti", "gu", "mo", "si", "ga", "se", "se", "ga", "mu", "fo", "ti", "ro", "gu", "sa", "fi", "me", "pe", "li", "fu", "go", "ma", "fi", "tu", "pa", "me", "si", "to", "le", "ta", "lu", "so", "li", "se", "sa", "pu", "po", "ri", "go", "pa", "le", "su", "ri", "go", "gu", "ta", "ge", "pi", "go", "ge", "sa", "pu", "po", "si", "te", "pa", "su", "so", "gui", "fo", "fa", "me", "tu", "pi", "so", "fu", "ra", "le", "ri", "ro", "se", "la", "ru", "to", "pi", "ge", "ta", "fu", "to", "ti", "po", "ga", "te", "pu", "ti", "ro", "ru", "sa", "te", "ti", "so", "pe", "ra", "fu", "go", "ti", "pe", "ga", "gu", "ro", "fi", "to", "ma", "fe", "gu", "mi", "mo", "pu", "ga", "pe", "gui", "fo", "fe", "ga", "mu", "ro", "fi", "re", "fa", "lu", "mo", "si", "so", "ra", "re", "fu", "gui", "fo", "su", "ma", "fe", "mi", "mo", "re", "fa", "su", "mo", "gui", "me", "la", "mu", "go", "pi", "lo", "sa", "se", "lu", "li", "po", "lu", "fa", "se", "fi", "po", "te", "pa", "tu", "fo", "mi", "le", "ra", "tu", "fo", "li", "ro", "ta", "ge", "mu", "fi", "to", "mu", "la", "re", "li", "lo", "me", "ma", "gu", "lo", "ri", "fe", "ma", "ru", "lo", "mi", "mo", "la", "pe", "ru", "si", "lo", "ra", "fi", "te", "gu", "lo", "su", "fe", "so", "la", "ti", "ru", "pa", "fi", "pe", "po", "ta", "ri", "po", "lu", "pe", "ro", "fa", "li", "me", "su", "go", "tu", "ge", "ro", "ma", "si", "tu", "ra", "gui", "te", "to", "fa", "si", "ro", "mu", "re", "po", "ta", "pi", "fe", "ru", "mo", "ru", "me", "to", "ta", "fi", "gu", "fa", "si", "ge", "so", "sa", "li", "so", "pu", "me", "fo", "ma", "si", "pe", "lu", "po", "fu", "re", "po", "fa", "li", "mu", "sa", "pi", "se", "fo", "ma", "gui", "mo", "fu", "fe", "lo", "pa", "gui", "re", "fu", "so", "gu", "se", "go", "sa", "gui", "su", "la", "mi", "le", "ro", "la", "mi", "fo", "su", "te", "go", "ga", "ti", "se", "pu", "to", "mu", "le", "lo", "ra", "mi", "lu", "ta", "ri", "me", "lo", "ra", "fi", "to", "tu", "ge", "so", "la", "ri", "le", "tu", "fo", "lu", "te", "mo", "ga", "ri", "pu", "ma", "li", "fe", "mo", "ga", "pi", "go", "ru", "se", "mo", "sa", "mi", "ge", "mu", "ro", "pu", "pe", "fo", "pa", "pi", "fu", "ga", "ti", "re", "go", "pa", "ti", "lo", "gu", "le", "to", "ru", "ra", "po", "se", "pi", "te", "so", "ri", "lu", "sa", "pu", "si", "fa", "go", "se", "me", "su", "li", "la", "mo", "ta", "pu", "ga", "ro", "re", "ri", "se", "fo", "li", "tu", "fa", "mu", "pi", "pa", "mo", "te", "ge", "pu", "gui", "ga", "so", "ga", "lu", "sa", "fo", "fe", "li", "pe", "lo", "ti", "su", "ga", "ru", "li", "sa", "ro", "me", "fe", "lu", "fi", "pa", "ro", "la", "mu", "fa", "lo", "ge", "ti", "re", "ro", "fi", "gu", "ma", "fu", "ri", "ma", "so", "le", "se", "gu", "ti", "ma", "go", "ma", "tu", "pa", "go", "le", "gui", "me", "go", "gui", "ru", "ra", "su", "mi", "ra", "fo", "fe", "re", "fu", "mi", "fa", "po", "fa", "fu", "ma", "so", "te", "mi", "fe", "mo", "pi", "pu", "pa", "gu", "gui", "ta", "lo", "ge", "pe", "tu", "pi", "ra", "lo", "ra", "su", "la", "mo", "pe", "fi", "le", "to", "si", "mu", "la", "tu", "fi", "la", "po", "re", "le", "mu", "si", "sa", "fo", "sa", "gu", "ta", "to", "me", "si", "ge", "po", "mi", "fu", "ta", "lu", "ti", "ga", "to", "pe", "te", "ru", "ri", "ta", "to", "pa", "ge", "ro", "pa", "ri", "su", "mo", "mi", "la", "pu", "pe", "ta", "tu", "pe", "mo", "gui", "ri", "le", "ro", "mu", "ga", "to", "pe", "to", "ta", "li", "ru", "ro", "pi", "fa", "su", "se", "pa", "fu", "me", "so", "ti", "si", "se", "to", "fu", "la", "fo", "fe", "go", "ra", "ti", "fu", "po", "ti", "pa", "lu", "fe", "fa", "lu", "ge", "fo", "ri", "gui", "pe", "go", "tu", "sa", "po", "me", "lo", "la", "mi", "mu", "to", "fi", "ra", "tu", "le", "ga", "mu", "te", "lo", "si", "li", "re", "mo", "su", "ra", "so", "te", "so", "sa", "gui", "gu", "so", "gui", "ga", "mu", "te", "la", "ru", "fe", "po", "mi", "ti", "fe", "so", "ru", "fa", "ro", "se", "fo", "ga", "pi", "lu", "lo", "ri", "sa", "gu", "re", "sa", "gu", "se", "go", "pi", "pi", "ge", "po", "pu", "ma", "lo", "re", "mo", "ma", "fi", "pu", "go", "li", "ma", "ru", "ge", "ra", "pu", "le", "ro", "fi", "mi", "me", "lo", "lu", "pa", "go", "le", "po", "fa", "si", "tu", "fo", "si", "ta", "fu", "me", "ma", "su", "re", "to", "li", "fi", "te", "fo", "gu", "ta", "mo", "fu", "li", "fo", "se", "ra", "ti", "sa", "mu", "pe", "fo", "go", "ga", "mi", "fe", "ru", "to", "pe", "mu", "ta", "si", "so", "pu", "mi", "po", "ge", "ga", "ri", "ra", "tu", "fe", "lo", "ro", "fa", "si", "pe", "gu", "ro", "ge", "pu", "pa", "ri", "mo", "mu", "pi", "mo", "me", "la", "gui", "fa", "gu", "te", "to", "to", "ra", "li", "te", "fu", "go", "fe", "fu", "ra", "mi", "ro", "lu", "ti", "so", "le", "pa", "li", "ta", "su", "se", "go", "mo", "ta", "gui", "se", "su", "lo", "re", "lu", "fa", "li", "po", "su", "fi", "lo", "te", "fa", "mi", "pa", "ru", "ge", "ro", "lo", "la", "fi", "me", "tu", "so", "me", "ru", "sa", "ti", "to", "ru", "si", "ro", "fe", "ta", "fi", "ma", "pu", "re", "po", "po", "ma", "pi", "ge", "lu", "fo", "te", "tu", "ga", "fi", "lo", "gu", "gui", "go", "re", "sa", "pi", "ga", "fu", "le", "so", "fo", "pa", "ti", "le", "pu", "po", "se", "su", "la", "gui", "go", "tu", "ri", "to", "pe", "ma", "si", "la", "lu", "me", "mo", "so", "sa", "ri", "re", "mu", "mo", "le", "gu", "ma", "pi", "fo", "pa", "le", "tu", "mo", "fi", "pi", "la", "fu", "ge", "mo", "pa", "to", "si", "fu", "re", "fu", "fe", "sa", "pi", "so", "pe", "la", "pe", "su", "po", "ti", "mi", "pa", "tu", "pe", "po", "ga", "so", "fi", "ru", "le", "su", "le", "la", "ti", "po", "se", "ma", "fe", "mu", "ro", "si", "ri", "ga", "ru", "re", "fo", "ma", "fo", "ti", "gu", "fe", "pu", "me", "fa", "si", "go", "ge", "sa", "me", "lu", "so", "gui", "si", "ra", "su", "te", "lo", "ra", "mo", "mi", "pu", "ge", "tu", "te", "ta", "mi", "lo", "te", "fa", "re", "fu", "go", "mi", "gui", "fa", "lu", "fe", "go", "la", "ro", "gui", "lu", "te", "mu", "pe", "ga", "gui", "mo", "re", "ga", "te", "pu", "fo", "ri", "li", "sa", "pu", "le", "ro", "fa", "po", "ri", "mu", "se", "ru", "ge", "pa", "ri", "ro", "le", "ta", "se", "gu", "lo", "li", "ti", "ma", "gu", "se", "to", "sa", "lo", "pi", "su", "pe", "gu", "se", "ra", "li", "fo", "fe", "ra", "ge", "ru", "to", "pi", "fi", "ta", "mu", "me", "so", "ta", "go", "li", "tu", "me", "lu", "re", "ma", "fi", "to", "me", "mo", "ru", "pa", "pe", "fi", "fu", "ri", "to", "me", "ga", "pi", "po", "pe", "ru", "fa", "se", "ru", "pa", "ri", "go", "pa", "lo", "tu", "fa", "le", "si", "pu", "pi", "ro", "ge", "ma", "ri", "lo", "re", "tu", "ga", "te", "gu", "ra", "ti", "mo", "la", "to", "pu", "sa", "te", "mi", "mu", "ti", "go", "re", "ra", "ti", "to", "te", "pu", "ma", "le", "tu", "fa", "mi", "to", "ta", "ro", "gu", "ga", "se", "ti", "ru", "mi", "mo", "le", "la", "si", "ro", "le", "fu", "la", "fe", "su", "ta", "si", "lo", "ma", "fo", "mu", "la", "ge", "ri", "su", "gui", "po", "pe", "sa", "mi", "so", "ge", "mu", "ra", "me", "fu", "ga", "li", "so", "fa", "go", "fu", "ta", "me", "gui", "tu", "si", "so", "te", "fa", "li", "mo", "fe", "lu", "ta", "pe", "mu", "la", "fi", "ro", "ga", "po", "lu", "ma", "fe", "pi", "lu", "li", "lo", "se", "ta", "gui", "fo", "me", "su", "pa", "ge", "pu", "ma", "pi", "fo", "ra", "so", "su", "ra", "re", "li", "gu", "fi", "fo", "fe", "pa", "fi", "go", "se", "gu", "sa", "re", "lu", "sa", "gui", "po", "sa", "tu", "gui", "to", "ga", "me", "ma", "so", "ti", "su", "me", "sa", "go", "ru", "fi", "le", "pu", "le", "ta", "si", "go", "fo", "ru", "ti", "ro", "ma", "fe", "fa", "po", "ri", "ru", "te", "ga", "ro", "gu", "mi", "te", "tu", "re", "ra", "mi", "mo", "to", "gu", "li", "fo", "ta", "re", "ta", "mo", "fi", "mu", "re", "ta", "po", "tu", "pi", "me", "lu", "pe", "sa", "pi", "ro", "po", "pu", "pi", "mo", "sa", "le", "sa", "to", "si", "lu", "se", "fa", "mo", "lu", "ti", "pe", "fu", "se", "fa", "ri", "lo", "lo", "su", "ri", "so", "pa", "ge", "pa", "fo", "pi", "tu", "pe", "pa", "so", "mu", "ri", "ge", "ru", "fe", "ma", "li", "fo", "ro", "fu", "si", "po", "fa", "pe", "la", "go", "li", "fu", "ge", "la", "fo", "fu", "gui", "fe", "mu", "me", "pa", "ti", "to", "go", "lu", "fi", "go", "ra", "te", "ga", "ro", "gui", "gu", "le", "ra", "lo", "su", "si", "se", "gu", "te", "la", "gui", "so", "so", "mu", "mi", "lo", "la", "se", "ra", "lo", "mi", "pu", "fe", "ma", "to", "pu", "li", "re", "su", "ge", "ga", "fi", "po", "mo", "so", "tu", "re", "mi", "ra", "su", "pa", "to", "fi", "le", "li", "mu", "fa", "me", "mo", "fo", "pi", "se", "ma", "fu", "ru", "po", "su", "me", "ti", "fa", "lu", "ma", "lo", "ri", "se", "gui", "tu", "ra", "te", "to", "to", "fi", "re", "fa", "gu", "lu", "lo", "gu", "le", "li", "sa", "fu", "la", "so", "si", "re", "pi", "su", "ma", "ge", "ro", "po", "gui", "le", "ra", "ru", "mu", "fo", "ru", "pe", "pi", "la", "gu", "sa", "go", "pi", "pe", "ti", "gu", "sa", "re", "fo", "lo", "ri", "pe", "ta", "pu", "su", "to", "pu", "se", "fi", "ta", "ru", "ta", "fo", "li", "ge", "fi", "fu", "ga", "le", "po", "ro", "ti", "me", "ga", "lu", "fu", "go", "fu", "fe", "gui", "pa", "mu", "fa", "ro", "ti", "me", "si", "lu", "ta", "fe", "so", "go", "li", "fe", "la", "su", "tu", "mo", "lu", "te", "ri", "ma", "tu", "ga", "po", "gui", "te", "ri", "ru", "la", "se", "lo", "so", "mi", "ge", "pa", "mu", "gu", "ro", "mu", "ge", "si", "ga", "pu", "ra", "mo", "mi", "fe", "mi", "pu", "pa", "pe", "go", "mo", "si", "te", "sa", "tu", "pu", "ro", "te", "ti", "sa", "su", "fi", "pu", "ta", "to", "me", "lo", "mi", "ge", "ru", "ga", "pi", "ra", "po", "se", "gu", "ro", "go", "pe", "ri", "ga", "ru", "gui", "mu", "ma", "po", "te", "so", "si", "pe", "mu", "pa", "ti", "ta", "so", "le", "tu", "to", "lo", "fe", "fi", "la", "lu", "si", "su", "fa", "ro", "re", "mo", "li", "me", "gu", "ta", "li", "ma", "fo", "re", "mu", "lo", "mo", "ge", "gui", "ra", "fu", "ri", "tu", "ga", "lo", "pe", "fo", "ti", "le", "lu", "fa", "gui", "sa", "go", "ge", "su", "fo", "to", "le", "pi", "ta", "tu", "ti", "gu", "ra", "fo", "ge", "to", "fi", "re", "fu", "ma", "fi", "la", "to", "me", "ru", "go", "so", "se", "mi", "pa", "gu", "mi", "ru", "sa", "so", "le", "ro", "ri", "se", "pu", "sa", "mi", "pa", "mo", "fe", "pu", "mo", "po", "re", "si", "fa", "pu", "li", "lu", "la", "mo", "fe", "go", "gui", "te", "tu", "ra", "si", "fa", "ro", "pe", "fu", "po", "fo", "me", "li", "ma", "mu", "pi", "fu", "pa", "go", "se", "po", "pi", "fe", "su", "la", "ri", "ga", "lo", "te", "lu", "so", "ri", "fa", "fu", "te", "lo", "mi", "ra", "te", "lo", "su", "ga", "lo", "fe", "gui", "tu", "pe", "pu", "fo", "ti", "sa", "si", "si", "ra", "tu", "ge", "mo", "pi", "la", "le", "mo", "tu", "ma", "ro", "pe", "li", "gu", "ge", "fu", "lo", "fi", "fa", "fi", "ti", "ta", "su", "fe", "so", "ri", "fa", "re", "po", "mu", "fa", "fo", "le", "pi", "lu", "fe", "su", "ro", "mi", "pa", "pi", "fi", "ga", "mu", "se", "fo", "ti", "sa", "se", "so", "gu", "pa", "so", "re", "si", "ru", "re", "tu", "mo", "li", "la", "ri", "pi", "pa", "lu", "le", "po", "li", "ma", "me", "fo", "fu", "ta", "to", "ge", "fi", "fu", "te", "ru", "po", "gui", "ga", "mi", "li", "sa", "pu", "pe", "to", "si", "ta", "fe", "to", "ru", "la", "mo", "me", "mi", "pu", "le", "mu", "to", "si", "ma", "li", "mi", "ma", "gu", "me", "ro", "fi", "ga", "ge", "ro", "pu", "ra", "go", "te", "ti", "su", "se", "lu", "go", "pi", "ra", "ti", "gui", "la", "ru", "re", "go", "gui", "pa", "pe", "go", "lu", "sa", "po", "se", "ri", "mu", "me", "gu", "so", "ri", "ta", "gui"]
syllables = ["toh"]
unique_syllables = list(dict.fromkeys(syllables))
for x in unique_syllables:
tts = gTTS(x, lang='es')
filename = "%s.mp3" % x
tts.save(filename)
|
[
"\n# Generate female voice syllablde using Google TTS API\n\n# Go to home directory that contains env folder and run source env/bin/activate\n# Then run the Python script with python EyeSound_generate.py\n\n# from gtts import gTTS\n\nfrom gtts import *\n\n# syllables = [\"fa\", \"to\", \"se\", \"pu\", \"ti\", \"mi\", \"ro\", \"pe\", \"fa\", \"lu\", \"to\", \"la\", \"si\", \"su\", \"fe\", \"le\", \"si\", \"so\", \"la\", \"mu\", \"fe\", \"ga\", \"fo\", \"fe\", \"fu\", \"pi\", \"li\", \"mo\", \"re\", \"sa\", \"su\", \"po\", \"fa\", \"fi\", \"gu\", \"le\", \"me\", \"pi\", \"fo\", \"ta\", \"tu\", \"pe\", \"la\", \"ro\", \"re\", \"ru\", \"fi\", \"ri\", \"po\", \"le\", \"ga\", \"fu\", \"go\", \"ra\", \"gui\", \"ru\", \"pe\", \"se\", \"li\", \"mo\", \"pa\", \"pu\", \"me\", \"sa\", \"po\", \"ge\", \"tu\", \"gui\", \"fi\", \"to\", \"fe\", \"pa\", \"pu\", \"ro\", \"pa\", \"ri\", \"pu\", \"ge\", \"pe\", \"ri\", \"to\", \"ra\", \"ru\", \"te\", \"ma\", \"go\", \"me\", \"lu\", \"li\", \"ti\", \"lo\", \"me\", \"ra\", \"mu\", \"so\", \"ga\", \"pi\", \"tu\", \"re\", \"re\", \"mi\", \"po\", \"ga\", \"gu\", \"se\", \"ra\", \"mo\", \"te\", \"su\", \"mi\", \"si\", \"so\", \"ge\", \"ta\", \"ru\", \"fo\", \"ta\", \"ti\", \"mu\", \"se\", \"fe\", \"ti\", \"go\", \"ma\", \"lu\", \"ge\", \"pa\", \"lo\", \"pe\", \"gu\", \"ri\", \"pi\", \"go\", \"te\", \"la\", \"tu\", \"mo\", \"ma\", \"li\", \"fu\", \"me\", \"ge\", \"fi\", \"lo\", \"fa\", \"fu\", \"re\", \"ta\", \"so\", \"le\", \"mu\", \"si\", \"gui\", \"fo\", \"se\", \"ma\", \"gu\", \"lo\", \"sa\", \"mi\", \"lu\", \"te\", \"te\", \"gui\", \"ro\", \"sa\", \"su\", \"le\", \"ru\", \"ri\", \"la\", \"me\", \"to\", \"lu\", \"te\", \"mi\", \"ga\", \"to\", \"gui\", \"lo\", \"me\", \"ga\", \"tu\", \"re\", \"lo\", \"ma\", \"pi\", \"gu\", \"lo\", \"fu\", \"ti\", \"fa\", \"se\", \"po\", \"ru\", \"re\", \"si\", \"ta\", \"po\", \"pi\", \"mo\", \"le\", \"sa\", \"lu\", \"fe\", \"mo\", \"fa\", \"li\", \"tu\", \"ro\", \"mu\", \"mi\", \"pa\", \"ge\", \"go\", \"gu\", \"ge\", \"li\", \"fa\", \"lo\", \"ti\", \"so\", \"te\", \"pa\", \"mu\", \"ge\", \"to\", \"ta\", \"ti\", \"ru\", \"so\", \"tu\", \"li\", \"ga\", \"re\", \"lo\", \"tu\", \"le\", \"ri\", \"sa\", \"ro\", \"mi\", \"po\", \"se\", \"ra\", \"fu\", \"te\", \"fo\", \"ra\", \"mi\", \"mu\", \"fo\", \"su\", \"gui\", \"ra\", \"le\", \"ro\", \"su\", \"fe\", \"pi\", \"ma\", \"mo\", \"si\", \"ro\", \"ge\", \"ma\", \"gu\", \"se\", \"go\", \"sa\", \"ri\", \"pu\", \"go\", \"gu\", \"pi\", \"ma\", \"pe\", \"fo\", \"fu\", \"se\", \"gui\", \"ra\", \"so\", \"ri\", \"to\", \"re\", \"la\", \"su\", \"le\", \"po\", \"ga\", \"fi\", \"fu\", \"mo\", \"lu\", \"si\", \"ta\", \"fe\", \"mo\", \"pu\", \"me\", \"ti\", \"pa\", \"go\", \"li\", \"fo\", \"pe\", \"fa\", \"ru\", \"me\", \"so\", \"pa\", \"si\", \"lu\", \"to\", \"pu\", \"fi\", \"sa\", \"te\", \"so\", \"mu\", \"pe\", \"fi\", \"la\", \"fo\", \"fi\", \"go\", \"fe\", \"ta\", \"pu\", \"pe\", \"ro\", \"la\", \"gui\", \"su\", \"po\", \"ro\", \"gu\", \"ma\", \"me\", \"mi\", \"ga\", \"fi\", \"go\", \"pu\", \"le\", \"fa\", \"mu\", \"fe\", \"mo\", \"ti\", \"te\", \"to\", \"li\", \"pa\", \"gu\", \"se\", \"to\", \"lu\", \"ga\", \"ge\", \"ri\", \"la\", \"gui\", \"mo\", \"mu\", \"te\", \"ga\", \"fu\", \"te\", \"ro\", \"mi\", \"ge\", \"fo\", \"pi\", \"sa\", \"lu\", \"le\", \"fo\", \"su\", \"sa\", \"le\", \"fi\", \"pa\", \"li\", \"fo\", \"tu\", \"pe\", \"pa\", \"ru\", \"ge\", \"go\", \"si\", \"me\", \"ro\", \"ti\", \"ma\", \"mu\", \"me\", \"lo\", \"pu\", \"fa\", \"se\", \"ti\", \"ma\", \"ri\", \"so\", \"ru\", \"fe\", \"ma\", \"lu\", \"se\", \"to\", \"li\", \"fe\", \"go\", \"ri\", \"la\", \"pu\", \"fe\", \"mo\", \"mu\", \"ta\", \"pe\", \"pi\", \"sa\", \"mi\", \"to\", \"su\", \"re\", \"ta\", \"gu\", \"le\", \"po\", \"pi\", \"se\", \"mo\", \"si\", \"fa\", \"fu\", \"re\", \"po\", \"tu\", \"ra\", \"re\", \"gui\", \"ta\", \"si\", \"po\", \"fu\", \"ge\", \"la\", \"tu\", \"re\", \"lo\", \"ri\", \"le\", \"lo\", \"mi\", \"ta\", \"su\", \"ge\", \"go\", \"fu\", \"pa\", \"te\", \"li\", \"ra\", \"ti\", \"lo\", \"gu\", \"se\", \"sa\", \"su\", \"pe\", \"so\", \"gui\", \"pe\", \"po\", \"gui\", \"ga\", \"ru\", \"pe\", \"so\", \"ru\", \"la\", \"fe\", \"si\", \"fa\", \"pi\", \"ro\", \"lu\", \"me\", \"ra\", \"pu\", \"me\", \"fo\", \"fi\", \"re\", \"so\", \"fi\", \"ra\", \"tu\", \"te\", \"fo\", \"sa\", \"li\", \"mu\", \"fe\", \"ri\", \"sa\", \"me\", \"fo\", \"mu\", \"sa\", \"gui\", \"po\", \"le\", \"su\", \"me\", \"fi\", \"ta\", \"so\", \"pu\", \"ra\", \"go\", \"ga\", \"mi\", \"tu\", \"pe\", \"pi\", \"ga\", \"le\", \"po\", \"ru\", \"fa\", \"ri\", \"mo\", \"se\", \"fu\", \"pe\", \"si\", \"la\", \"ro\", \"tu\", \"ma\", \"lo\", \"la\", \"pi\", \"gu\", \"re\", \"fi\", \"la\", \"re\", \"so\", \"su\", \"la\", \"ti\", \"fo\", \"me\", \"gu\", \"se\", \"ri\", \"pa\", \"go\", \"mu\", \"fa\", \"to\", \"fa\", \"ti\", \"pu\", \"me\", \"li\", \"fa\", \"fe\", \"to\", \"tu\", \"ma\", \"mi\", \"go\", \"pe\", \"mu\", \"ge\", \"gui\", \"ga\", \"po\", \"gu\", \"ga\", \"po\", \"pa\", \"gui\", \"lu\", \"le\", \"gui\", \"ma\", \"ge\", \"ro\", \"gu\", \"ga\", \"li\", \"ro\", \"ge\", \"pu\", \"te\", \"li\", \"fa\", \"to\", \"lu\", \"sa\", \"ro\", \"ta\", \"ri\", \"su\", \"te\", \"si\", \"ta\", \"pe\", \"go\", \"pu\", \"pa\", \"pi\", \"lo\", \"te\", \"ru\", \"fe\", \"pi\", \"sa\", \"fo\", \"fu\", \"pa\", \"so\", \"ra\", \"si\", \"fu\", \"se\", \"mi\", \"pa\", \"te\", \"mo\", \"fu\", \"ta\", \"si\", \"to\", \"re\", \"tu\", \"le\", \"mi\", \"ma\", \"mo\", \"su\", \"ta\", \"mo\", \"ma\", \"fi\", \"ru\", \"ge\", \"ti\", \"ra\", \"se\", \"lo\", \"lu\", \"ra\", \"fi\", \"so\", \"fe\", \"lu\", \"re\", \"ti\", \"ra\", \"lo\", \"ru\", \"la\", \"tu\", \"fa\", \"se\", \"ro\", \"ti\", \"li\", \"ge\", \"pa\", \"tu\", \"fo\", \"gu\", \"mo\", \"gui\", \"se\", \"ta\", \"me\", \"ra\", \"po\", \"pu\", \"li\", \"gui\", \"lu\", \"la\", \"fe\", \"go\", \"gui\", \"gui\", \"pe\", \"ma\", \"lu\", \"go\", \"tu\", \"go\", \"pi\", \"le\", \"ra\", \"re\", \"ta\", \"to\", \"tu\", \"ri\", \"si\", \"mu\", \"ra\", \"le\", \"to\", \"mi\", \"ti\", \"le\", \"fa\", \"gu\", \"so\", \"pu\", \"ro\", \"li\", \"re\", \"la\", \"ge\", \"fa\", \"so\", \"lu\", \"ti\", \"pi\", \"fu\", \"pa\", \"pe\", \"so\", \"pi\", \"ri\", \"se\", \"ga\", \"pu\", \"to\", \"ru\", \"so\", \"mi\", \"fe\", \"ma\", \"le\", \"ga\", \"fo\", \"ru\", \"fi\", \"ri\", \"gu\", \"ma\", \"me\", \"fo\", \"fi\", \"si\", \"re\", \"la\", \"mu\", \"po\", \"lu\", \"po\", \"ti\", \"me\", \"pa\", \"fe\", \"pa\", \"lo\", \"su\", \"gui\", \"fi\", \"pu\", \"ga\", \"te\", \"mo\", \"si\", \"fi\", \"te\", \"sa\", \"fu\", \"ro\", \"su\", \"lo\", \"fi\", \"te\", \"sa\", \"pe\", \"ma\", \"ro\", \"gu\", \"si\", \"ti\", \"su\", \"sa\", \"ge\", \"lo\", \"ri\", \"mi\", \"fe\", \"ra\", \"ru\", \"mo\", \"mu\", \"to\", \"ri\", \"ge\", \"fa\", \"te\", \"sa\", \"mo\", \"fu\", \"pi\", \"mi\", \"ru\", \"ta\", \"re\", \"po\", \"li\", \"pi\", \"me\", \"ta\", \"su\", \"lo\", \"fu\", \"fo\", \"si\", \"pe\", \"ga\", \"se\", \"la\", \"go\", \"mu\", \"mi\", \"li\", \"ra\", \"ri\", \"le\", \"lu\", \"go\", \"ri\", \"te\", \"to\", \"pa\", \"pu\", \"ma\", \"ti\", \"ru\", \"pe\", \"go\", \"su\", \"si\", \"te\", \"ra\", \"po\", \"ga\", \"ma\", \"gui\", \"fe\", \"tu\", \"mo\", \"fi\", \"re\", \"mo\", \"ma\", \"lu\", \"la\", \"mi\", \"fu\", \"re\", \"to\", \"ru\", \"ri\", \"me\", \"sa\", \"so\", \"fa\", \"pa\", \"li\", \"re\", \"pu\", \"to\", \"li\", \"pe\", \"go\", \"ga\", \"su\", \"sa\", \"ri\", \"pu\", \"me\", \"so\", \"lu\", \"pi\", \"ge\", \"ma\", \"lo\", \"ra\", \"fa\", \"fi\", \"se\", \"fu\", \"ro\", \"pi\", \"me\", \"po\", \"ta\", \"fu\", \"ga\", \"pi\", \"su\", \"te\", \"mo\", \"gu\", \"mi\", \"pe\", \"pa\", \"mo\", \"ma\", \"ta\", \"si\", \"te\", \"ru\", \"so\", \"gui\", \"se\", \"so\", \"la\", \"mu\", \"fa\", \"li\", \"lu\", \"se\", \"lo\", \"fu\", \"li\", \"le\", \"la\", \"ro\", \"sa\", \"ga\", \"ti\", \"pe\", \"mu\", \"fo\", \"mi\", \"ge\", \"lo\", \"sa\", \"ru\", \"pa\", \"gui\", \"gu\", \"ge\", \"ro\", \"tu\", \"gui\", \"se\", \"ga\", \"to\", \"pa\", \"la\", \"pi\", \"me\", \"su\", \"lo\", \"si\", \"fe\", \"ro\", \"ra\", \"tu\", \"ta\", \"fi\", \"tu\", \"fe\", \"fo\", \"mu\", \"ti\", \"re\", \"fa\", \"fo\", \"ta\", \"sa\", \"mi\", \"ge\", \"gu\", \"po\", \"ti\", \"le\", \"fo\", \"fa\", \"gu\", \"ra\", \"si\", \"mu\", \"le\", \"po\", \"pu\", \"fi\", \"fe\", \"ta\", \"go\", \"la\", \"po\", \"te\", \"mu\", \"pi\", \"ma\", \"su\", \"ga\", \"ge\", \"mi\", \"so\", \"gu\", \"li\", \"re\", \"pa\", \"to\", \"mu\", \"mo\", \"se\", \"sa\", \"li\", \"me\", \"mo\", \"le\", \"gu\", \"li\", \"fa\", \"pu\", \"la\", \"re\", \"li\", \"lo\", \"ru\", \"mi\", \"te\", \"ma\", \"go\", \"ru\", \"so\", \"le\", \"ga\", \"fi\", \"pe\", \"lo\", \"ge\", \"tu\", \"mi\", \"ta\", \"lu\", \"ra\", \"me\", \"fi\", \"mo\", \"tu\", \"fi\", \"ge\", \"sa\", \"po\", \"gu\", \"ro\", \"re\", \"la\", \"si\", \"re\", \"to\", \"pe\", \"ru\", \"ti\", \"ra\", \"fu\", \"ta\", \"le\", \"ri\", \"po\", \"lu\", \"ti\", \"me\", \"fa\", \"lo\", \"fu\", \"lo\", \"pe\", \"ra\", \"gui\", \"ge\", \"so\", \"se\", \"fu\", \"ri\", \"la\", \"gu\", \"ma\", \"fe\", \"pi\", \"to\", \"fu\", \"gui\", \"fe\", \"ga\", \"so\", \"tu\", \"po\", \"ge\", \"fa\", \"mi\", \"se\", \"ro\", \"fe\", \"su\", \"si\", \"pa\", \"ru\", \"fa\", \"pe\", \"gui\", \"go\", \"mu\", \"ri\", \"le\", \"ra\", \"mo\", \"pu\", \"to\", \"me\", \"ma\", \"pi\", \"te\", \"fo\", \"me\", \"pu\", \"gui\", \"sa\", \"mu\", \"sa\", \"se\", \"si\", \"ro\", \"su\", \"si\", \"pe\", \"ta\", \"fo\", \"su\", \"fo\", \"te\", \"pa\", \"ri\", \"fe\", \"go\", \"re\", \"lu\", \"fi\", \"ga\", \"tu\", \"pa\", \"te\", \"ti\", \"fo\", \"pu\", \"pi\", \"se\", \"la\", \"ro\", \"lu\", \"go\", \"fe\", \"ta\", \"ti\", \"le\", \"fa\", \"so\", \"gui\", \"su\", \"ge\", \"po\", \"pa\", \"li\", \"te\", \"lu\", \"fa\", \"ru\", \"se\", \"gui\", \"to\", \"mo\", \"ga\", \"pi\", \"ru\", \"fe\", \"su\", \"ra\", \"go\", \"pi\", \"gu\", \"te\", \"go\", \"ma\", \"pi\", \"le\", \"mu\", \"ta\", \"gu\", \"ge\", \"ti\", \"so\", \"go\", \"sa\", \"ri\", \"mu\", \"ge\", \"tu\", \"la\", \"to\", \"fi\", \"lu\", \"le\", \"to\", \"sa\", \"ri\", \"se\", \"gu\", \"ga\", \"su\", \"fe\", \"mi\", \"ro\", \"to\", \"ma\", \"mi\", \"tu\", \"pe\", \"gu\", \"pa\", \"po\", \"mi\", \"mu\", \"me\", \"mo\", \"ta\", \"gui\", \"re\", \"tu\", \"sa\", \"mu\", \"te\", \"li\", \"fo\", \"ro\", \"ta\", \"si\", \"pu\", \"le\", \"pu\", \"sa\", \"fo\", \"ti\", \"pu\", \"pe\", \"fo\", \"la\", \"ti\", \"ge\", \"su\", \"ma\", \"lu\", \"le\", \"pi\", \"lo\", \"fo\", \"la\", \"fi\", \"su\", \"te\", \"fu\", \"ma\", \"lo\", \"si\", \"tu\", \"se\", \"lo\", \"ra\", \"fi\", \"me\", \"ru\", \"pa\", \"fu\", \"me\", \"fi\", \"mo\", \"po\", \"fa\", \"gui\", \"lu\", \"se\", \"ru\", \"ga\", \"mo\", \"ri\", \"ru\", \"re\", \"so\", \"fa\", \"si\", \"pe\", \"fu\", \"ra\", \"tu\", \"pe\", \"ri\", \"po\", \"lo\", \"ra\", \"ti\", \"fu\", \"re\", \"lu\", \"ta\", \"ro\", \"li\", \"fu\", \"fe\", \"ro\", \"ga\", \"mi\", \"fe\", \"pu\", \"la\", \"pu\", \"re\", \"si\", \"go\", \"so\", \"pa\", \"li\", \"gu\", \"me\", \"mu\", \"mi\", \"ga\", \"te\", \"ru\", \"lo\", \"fe\", \"li\", \"go\", \"gu\", \"la\", \"te\", \"ga\", \"ro\", \"gu\", \"gui\", \"mo\", \"fa\", \"me\", \"pi\", \"lu\", \"fe\", \"ri\", \"fa\", \"le\", \"tu\", \"fo\", \"se\", \"fi\", \"fo\", \"pu\", \"ra\", \"re\", \"la\", \"go\", \"pu\", \"ti\", \"to\", \"ma\", \"pe\", \"si\", \"pu\", \"me\", \"gui\", \"ma\", \"re\", \"gu\", \"po\", \"le\", \"pi\", \"mo\", \"mu\", \"ga\", \"ge\", \"ma\", \"to\", \"su\", \"si\", \"so\", \"ga\", \"fe\", \"mi\", \"mu\", \"se\", \"ti\", \"ta\", \"me\", \"su\", \"to\", \"pe\", \"gui\", \"lo\", \"lu\", \"ta\", \"fe\", \"fa\", \"po\", \"fu\", \"pi\", \"ro\", \"ta\", \"le\", \"li\", \"fu\", \"re\", \"si\", \"ra\", \"se\", \"fu\", \"ro\", \"te\", \"mi\", \"so\", \"ru\", \"ma\", \"se\", \"pa\", \"mo\", \"tu\", \"ri\", \"po\", \"pa\", \"te\", \"fi\", \"gu\", \"te\", \"li\", \"sa\", \"ge\", \"mu\", \"mo\", \"ge\", \"ti\", \"ro\", \"su\", \"pa\", \"pe\", \"ra\", \"so\", \"ru\", \"fi\", \"go\", \"ra\", \"ge\", \"ri\", \"tu\", \"le\", \"pi\", \"pa\", \"pe\", \"pu\", \"go\", \"re\", \"si\", \"po\", \"fu\", \"fa\", \"le\", \"ta\", \"fo\", \"lu\", \"li\", \"fo\", \"sa\", \"re\", \"ti\", \"su\", \"ge\", \"fi\", \"la\", \"fe\", \"lu\", \"so\", \"me\", \"ri\", \"to\", \"tu\", \"sa\", \"me\", \"sa\", \"lo\", \"mu\", \"mi\", \"lo\", \"la\", \"se\", \"gui\", \"ru\", \"pe\", \"ga\", \"li\", \"le\", \"fo\", \"su\", \"se\", \"ro\", \"ri\", \"gu\", \"pa\", \"fi\", \"le\", \"fa\", \"po\", \"mu\", \"se\", \"mi\", \"gu\", \"sa\", \"go\", \"go\", \"pa\", \"si\", \"fe\", \"po\", \"lu\", \"ge\", \"fo\", \"mi\", \"fu\", \"ra\", \"ri\", \"re\", \"ta\", \"go\", \"gu\", \"te\", \"fi\", \"mu\", \"la\", \"to\", \"po\", \"la\", \"ri\", \"pe\", \"ro\", \"ru\", \"pe\", \"go\", \"fi\", \"pu\", \"ma\", \"pi\", \"pe\", \"ma\", \"mo\", \"ru\", \"le\", \"gui\", \"ru\", \"ra\", \"lo\", \"so\", \"ra\", \"mi\", \"re\", \"so\", \"fu\", \"fe\", \"to\", \"ti\", \"tu\", \"fa\", \"li\", \"ge\", \"la\", \"fo\", \"lu\", \"fe\", \"li\", \"su\", \"ga\", \"po\", \"lo\", \"ta\", \"pi\", \"ge\", \"to\", \"gu\", \"me\", \"lo\", \"gui\", \"lu\", \"sa\", \"gui\", \"te\", \"pa\", \"ro\", \"tu\", \"pe\", \"si\", \"pu\", \"pa\", \"so\", \"fo\", \"sa\", \"ti\", \"te\", \"mo\", \"mu\", \"le\", \"po\", \"si\", \"mu\", \"ta\", \"mi\", \"me\", \"ra\", \"so\", \"fu\", \"ge\", \"ri\", \"fu\", \"ta\", \"ro\", \"mo\", \"ma\", \"gui\", \"se\", \"lo\", \"tu\", \"te\", \"so\", \"li\", \"su\", \"ga\", \"ti\", \"se\", \"sa\", \"lo\", \"pu\", \"me\", \"pi\", \"tu\", \"fa\", \"mo\", \"to\", \"fa\", \"fi\", \"me\", \"go\", \"pu\", \"re\", \"mo\", \"pi\", \"ru\", \"la\", \"si\", \"fe\", \"ga\", \"to\", \"su\", \"re\", \"ti\", \"lu\", \"ma\", \"fo\", \"ro\", \"pe\", \"pi\", \"lo\", \"ga\", \"tu\", \"ga\", \"ri\", \"le\", \"ro\", \"pu\", \"gu\", \"re\", \"so\", \"ga\", \"si\", \"ta\", \"tu\", \"fo\", \"ge\", \"mi\", \"pi\", \"me\", \"ri\", \"go\", \"pa\", \"gu\", \"ma\", \"gui\", \"re\", \"so\", \"gu\", \"ru\", \"fe\", \"go\", \"ta\", \"ti\", \"ma\", \"gu\", \"ro\", \"fe\", \"fi\", \"fi\", \"fe\", \"mi\", \"fo\", \"ra\", \"pu\", \"ra\", \"fi\", \"me\", \"fo\", \"fu\", \"tu\", \"me\", \"lo\", \"sa\", \"pi\", \"ga\", \"pu\", \"go\", \"re\", \"si\", \"mi\", \"te\", \"si\", \"so\", \"ta\", \"su\", \"fa\", \"pi\", \"te\", \"lo\", \"mu\", \"fu\", \"le\", \"to\", \"pa\", \"mi\", \"pa\", \"mu\", \"lo\", \"te\", \"gui\", \"ri\", \"ge\", \"ti\", \"mo\", \"fa\", \"ru\", \"pa\", \"ti\", \"pe\", \"po\", \"su\", \"su\", \"pe\", \"fo\", \"ma\", \"fi\", \"sa\", \"fu\", \"po\", \"pe\", \"ri\", \"gui\", \"le\", \"fi\", \"po\", \"sa\", \"mu\", \"la\", \"li\", \"se\", \"go\", \"tu\", \"lu\", \"se\", \"po\", \"ra\", \"ri\", \"la\", \"lu\", \"mo\", \"le\", \"ti\", \"li\", \"re\", \"gui\", \"ro\", \"ma\", \"fu\", \"ta\", \"si\", \"ge\", \"to\", \"lu\", \"mu\", \"te\", \"ro\", \"fa\", \"gui\", \"fa\", \"su\", \"so\", \"se\", \"pi\", \"si\", \"se\", \"li\", \"to\", \"la\", \"lu\", \"sa\", \"mi\", \"fe\", \"mo\", \"ru\", \"pu\", \"ge\", \"mo\", \"la\", \"li\", \"ra\", \"ru\", \"to\", \"me\", \"li\", \"ti\", \"la\", \"ti\", \"lu\", \"se\", \"lo\", \"mo\", \"fa\", \"re\", \"fu\", \"si\", \"tu\", \"la\", \"fe\", \"lo\", \"fi\", \"lu\", \"gui\", \"re\", \"mo\", \"pa\", \"ro\", \"ra\", \"pi\", \"tu\", \"re\", \"ro\", \"ro\", \"ma\", \"te\", \"mu\", \"gui\", \"gu\", \"ra\", \"me\", \"po\", \"gui\", \"gu\", \"fi\", \"ge\", \"lo\", \"ga\", \"mo\", \"ta\", \"fi\", \"gu\", \"me\", \"go\", \"so\", \"sa\", \"ge\", \"su\", \"li\", \"su\", \"ga\", \"ge\", \"so\", \"ti\", \"su\", \"li\", \"se\", \"so\", \"ra\", \"po\", \"sa\", \"ri\", \"mu\", \"fe\", \"to\", \"po\", \"pa\", \"le\", \"gu\", \"ri\", \"ru\", \"fa\", \"re\", \"mo\", \"mi\", \"mu\", \"pi\", \"pe\", \"fo\", \"ma\", \"to\", \"fa\", \"si\", \"pu\", \"ge\", \"fo\", \"go\", \"ra\", \"fe\", \"lu\", \"ti\", \"mu\", \"ta\", \"pe\", \"ro\", \"pi\", \"ru\", \"mi\", \"fe\", \"ro\", \"fa\", \"fo\", \"pa\", \"mi\", \"ru\", \"le\", \"po\", \"to\", \"ta\", \"se\", \"ru\", \"pi\", \"lu\", \"pa\", \"le\", \"to\", \"li\", \"pu\", \"si\", \"me\", \"to\", \"sa\", \"go\", \"ma\", \"gui\", \"fu\", \"te\", \"so\", \"fo\", \"la\", \"me\", \"tu\", \"fi\", \"fu\", \"sa\", \"te\", \"go\", \"si\", \"fu\", \"ri\", \"le\", \"go\", \"ta\", \"lo\", \"ga\", \"li\", \"su\", \"pe\", \"mo\", \"lo\", \"ga\", \"pe\", \"pu\", \"mi\", \"pu\", \"ma\", \"se\", \"fo\", \"ri\", \"tu\", \"ti\", \"te\", \"po\", \"la\", \"so\", \"ta\", \"re\", \"ro\", \"pi\", \"ru\", \"gui\", \"fu\", \"se\", \"ga\", \"mo\", \"fi\", \"fa\", \"po\", \"se\", \"gu\", \"re\", \"ma\", \"pi\", \"to\", \"pu\", \"mo\", \"ga\", \"le\", \"go\", \"si\", \"lu\", \"ri\", \"su\", \"te\", \"ta\", \"fo\", \"ti\", \"ra\", \"mo\", \"pe\", \"pu\", \"te\", \"pa\", \"si\", \"fo\", \"lu\", \"fo\", \"la\", \"se\", \"po\", \"gui\", \"pu\", \"li\", \"gu\", \"re\", \"ma\", \"to\", \"li\", \"ma\", \"so\", \"me\", \"lu\", \"me\", \"ga\", \"gui\", \"po\", \"su\", \"go\", \"pa\", \"pe\", \"mo\", \"fi\", \"tu\", \"pi\", \"ru\", \"me\", \"pa\", \"po\", \"si\", \"ta\", \"ro\", \"ge\", \"fu\", \"pe\", \"sa\", \"fi\", \"so\", \"ru\", \"ro\", \"ra\", \"fe\", \"to\", \"mi\", \"su\", \"mi\", \"lu\", \"le\", \"la\", \"so\", \"gui\", \"pa\", \"go\", \"le\", \"ru\", \"le\", \"ta\", \"ti\", \"lo\", \"fu\", \"po\", \"sa\", \"ge\", \"lo\", \"li\", \"mu\", \"si\", \"pu\", \"fe\", \"ra\", \"go\", \"mi\", \"ga\", \"lo\", \"fe\", \"su\", \"ge\", \"ra\", \"li\", \"mo\", \"tu\", \"to\", \"fa\", \"te\", \"fo\", \"ti\", \"fu\", \"ti\", \"tu\", \"ge\", \"fa\", \"lo\", \"pi\", \"la\", \"fo\", \"te\", \"tu\", \"se\", \"fa\", \"ri\", \"go\", \"mu\", \"so\", \"ma\", \"me\", \"so\", \"ri\", \"gu\", \"fi\", \"mu\", \"pe\", \"sa\", \"ro\", \"ri\", \"sa\", \"to\", \"re\", \"mu\", \"fe\", \"la\", \"mi\", \"ro\", \"gu\", \"lo\", \"sa\", \"ge\", \"lu\", \"ri\", \"go\", \"gu\", \"ri\", \"go\", \"ra\", \"le\", \"lu\", \"ri\", \"re\", \"so\", \"ga\", \"me\", \"su\", \"gui\", \"fa\", \"to\", \"so\", \"ma\", \"re\", \"ru\", \"si\", \"lo\", \"fu\", \"si\", \"lo\", \"pa\", \"re\", \"fu\", \"gui\", \"fe\", \"mo\", \"ta\", \"ge\", \"gu\", \"si\", \"sa\", \"lo\", \"fo\", \"la\", \"pe\", \"gu\", \"mi\", \"mo\", \"tu\", \"gui\", \"mo\", \"sa\", \"me\", \"ru\", \"pi\", \"pe\", \"po\", \"sa\", \"re\", \"ru\", \"ri\", \"pa\", \"mo\", \"to\", \"ra\", \"fe\", \"fu\", \"ti\", \"ro\", \"pu\", \"mi\", \"so\", \"la\", \"pe\", \"pu\", \"mi\", \"ge\", \"fo\", \"fa\", \"le\", \"lu\", \"fi\", \"ra\", \"so\", \"ro\", \"fa\", \"le\", \"tu\", \"fi\", \"to\", \"ru\", \"ti\", \"to\", \"ma\", \"ge\", \"gu\", \"fi\", \"te\", \"lo\", \"la\", \"fe\", \"fu\", \"ti\", \"ga\", \"fo\", \"mo\", \"ta\", \"te\", \"mu\", \"gui\", \"fo\", \"mu\", \"fi\", \"fo\", \"fa\", \"fe\", \"mu\", \"ti\", \"le\", \"go\", \"ra\", \"te\", \"tu\", \"li\", \"ta\", \"ro\", \"go\", \"pa\", \"se\", \"su\", \"li\", \"so\", \"lu\", \"pi\", \"ro\", \"ta\", \"te\", \"su\", \"si\", \"se\", \"to\", \"ma\", \"se\", \"pu\", \"mi\", \"la\", \"po\", \"lo\", \"ga\", \"me\", \"pu\", \"pi\", \"po\", \"su\", \"li\", \"po\", \"ga\", \"se\", \"tu\", \"li\", \"me\", \"ro\", \"pa\", \"pe\", \"mu\", \"pi\", \"ma\", \"go\", \"po\", \"fu\", \"ta\", \"mi\", \"le\", \"po\", \"ge\", \"ma\", \"gu\", \"li\", \"to\", \"ge\", \"po\", \"li\", \"ga\", \"gu\", \"ru\", \"fe\", \"lo\", \"sa\", \"li\", \"pe\", \"tu\", \"ga\", \"pi\", \"se\", \"so\", \"pe\", \"ra\", \"fu\", \"ri\", \"fo\", \"me\", \"fo\", \"pi\", \"la\", \"pu\", \"gu\", \"se\", \"ro\", \"ra\", \"fi\", \"se\", \"gu\", \"la\", \"fi\", \"te\", \"go\", \"te\", \"fa\", \"pu\", \"si\", \"go\", \"re\", \"mo\", \"ti\", \"ra\", \"lu\", \"pu\", \"le\", \"po\", \"ga\", \"ri\", \"te\", \"pu\", \"sa\", \"gui\", \"ge\", \"lo\", \"le\", \"la\", \"su\", \"gui\", \"mo\", \"te\", \"lo\", \"gui\", \"sa\", \"tu\", \"mu\", \"te\", \"fo\", \"fa\", \"ti\", \"re\", \"su\", \"fa\", \"si\", \"re\", \"mo\", \"me\", \"ta\", \"tu\", \"pi\", \"so\", \"le\", \"ro\", \"mi\", \"ma\", \"mu\", \"tu\", \"me\", \"mo\", \"la\", \"mi\", \"fe\", \"mu\", \"pa\", \"ti\", \"pe\", \"fo\", \"fe\", \"sa\", \"ru\", \"mi\", \"lo\", \"se\", \"to\", \"fi\", \"fa\", \"fu\", \"fu\", \"re\", \"so\", \"ta\", \"si\", \"le\", \"lu\", \"ra\", \"li\", \"me\", \"ro\", \"se\", \"ga\", \"lu\", \"fi\", \"ro\", \"fe\", \"so\", \"si\", \"ta\", \"ru\", \"lu\", \"pe\", \"to\", \"ma\", \"pi\", \"ge\", \"ru\", \"ma\", \"ri\", \"fe\", \"to\", \"re\", \"pa\", \"mu\", \"ti\", \"po\", \"pe\", \"go\", \"ri\", \"pa\", \"su\", \"su\", \"ge\", \"go\", \"pa\", \"gui\", \"me\", \"ge\", \"ru\", \"ta\", \"ti\", \"mo\", \"su\", \"li\", \"me\", \"lo\", \"pa\", \"ge\", \"po\", \"mi\", \"pa\", \"fu\", \"ta\", \"te\", \"ti\", \"su\", \"mo\", \"po\", \"te\", \"su\", \"fa\", \"si\", \"lo\", \"fu\", \"pi\", \"ge\", \"mo\", \"fa\", \"pe\", \"go\", \"li\", \"ra\", \"su\", \"ma\", \"fe\", \"li\", \"mu\", \"to\", \"go\", \"fe\", \"fu\", \"ma\", \"ri\", \"to\", \"mu\", \"ti\", \"le\", \"po\", \"sa\", \"me\", \"so\", \"ri\", \"ga\", \"pu\", \"la\", \"ge\", \"mi\", \"gu\", \"lo\", \"fo\", \"re\", \"gu\", \"ra\", \"fi\", \"po\", \"gu\", \"gui\", \"re\", \"ro\", \"ta\", \"re\", \"to\", \"fi\", \"la\", \"tu\", \"sa\", \"me\", \"pi\", \"tu\", \"po\", \"to\", \"me\", \"tu\", \"pa\", \"pi\", \"so\", \"ru\", \"si\", \"te\", \"fo\", \"ma\", \"se\", \"mo\", \"pi\", \"sa\", \"ru\", \"fa\", \"re\", \"si\", \"ru\", \"fo\", \"ro\", \"se\", \"mu\", \"ga\", \"mi\", \"fo\", \"lu\", \"fi\", \"pe\", \"go\", \"la\", \"te\", \"lo\", \"gui\", \"fa\", \"mu\", \"ra\", \"se\", \"gui\", \"fu\", \"so\", \"so\", \"le\", \"pu\", \"sa\", \"gui\", \"go\", \"tu\", \"ri\", \"fe\", \"so\", \"ga\", \"fe\", \"fo\", \"si\", \"ma\", \"lu\", \"pa\", \"le\", \"fi\", \"lu\", \"go\", \"mo\", \"pe\", \"lu\", \"la\", \"li\", \"ro\", \"pu\", \"mi\", \"se\", \"to\", \"ra\", \"le\", \"ro\", \"ti\", \"ta\", \"gu\", \"ga\", \"pe\", \"ri\", \"pu\", \"ro\", \"lo\", \"lo\", \"gui\", \"la\", \"me\", \"gu\", \"se\", \"ri\", \"to\", \"ma\", \"tu\", \"mo\", \"pe\", \"ri\", \"fa\", \"lu\", \"sa\", \"ri\", \"se\", \"fu\", \"po\", \"ri\", \"ro\", \"si\", \"ra\", \"fe\", \"mu\", \"le\", \"ti\", \"lo\", \"pa\", \"gu\", \"lo\", \"se\", \"ti\", \"sa\", \"tu\", \"la\", \"gui\", \"ge\", \"su\", \"ro\", \"ti\", \"fo\", \"ri\", \"ga\", \"le\", \"lu\", \"ge\", \"gui\", \"go\", \"sa\", \"fu\", \"fo\", \"re\", \"fi\", \"ma\", \"ru\", \"ga\", \"ti\", \"re\", \"mu\", \"mo\", \"pi\", \"mo\", \"fi\", \"pa\", \"ge\", \"pu\", \"re\", \"li\", \"ro\", \"ra\", \"su\", \"to\", \"ge\", \"pi\", \"ga\", \"su\", \"pa\", \"pi\", \"fe\", \"ru\", \"lo\", \"li\", \"go\", \"pi\", \"ta\", \"pe\", \"su\", \"pe\", \"fi\", \"fo\", \"fa\", \"ru\", \"ro\", \"fe\", \"li\", \"ta\", \"pu\", \"ta\", \"mi\", \"te\", \"tu\", \"so\", \"fi\", \"to\", \"mi\", \"sa\", \"re\", \"ru\", \"te\", \"si\", \"mo\", \"ta\", \"pu\", \"po\", \"le\", \"si\", \"ra\", \"fu\", \"ra\", \"si\", \"pe\", \"gu\", \"to\", \"gui\", \"po\", \"li\", \"fa\", \"te\", \"tu\", \"fe\", \"pi\", \"so\", \"ga\", \"lu\", \"go\", \"me\", \"gui\", \"la\", \"gu\", \"ma\", \"fi\", \"me\", \"pu\", \"fo\", \"si\", \"so\", \"ti\", \"ma\", \"se\", \"fu\", \"me\", \"mi\", \"po\", \"la\", \"mu\", \"so\", \"te\", \"mi\", \"pa\", \"mu\", \"fa\", \"li\", \"le\", \"lu\", \"go\", \"mi\", \"me\", \"pa\", \"pu\", \"mi\", \"mo\", \"mo\", \"mu\", \"ma\", \"ge\", \"si\", \"su\", \"so\", \"ra\", \"mi\", \"ge\", \"fo\", \"fe\", \"ta\", \"ti\", \"fu\", \"ro\", \"re\", \"ra\", \"lu\", \"fi\", \"lo\", \"so\", \"fu\", \"ga\", \"se\", \"ri\", \"mu\", \"to\", \"ma\", \"si\", \"te\", \"ro\", \"le\", \"ra\", \"ri\", \"tu\", \"so\", \"ge\", \"ma\", \"su\", \"gui\", \"to\", \"to\", \"ru\", \"pa\", \"le\", \"ti\", \"pu\", \"fo\", \"la\", \"fi\", \"pe\", \"so\", \"re\", \"sa\", \"si\", \"pu\", \"fo\", \"se\", \"ta\", \"tu\", \"ri\", \"go\", \"lo\", \"pu\", \"la\", \"me\", \"gui\", \"fu\", \"go\", \"ta\", \"li\", \"se\", \"go\", \"pe\", \"pa\", \"gui\", \"mu\", \"po\", \"le\", \"fa\", \"ru\", \"li\", \"fo\", \"ro\", \"lu\", \"fa\", \"pe\", \"mi\", \"lu\", \"mo\", \"fa\", \"pi\", \"me\", \"to\", \"ge\", \"fa\", \"mi\", \"ru\", \"go\", \"te\", \"la\", \"fu\", \"si\", \"so\", \"fo\", \"tu\", \"sa\", \"te\", \"pi\", \"gu\", \"lo\", \"sa\", \"gui\", \"re\", \"lo\", \"se\", \"la\", \"pi\", \"gu\", \"mo\", \"fe\", \"ga\", \"gu\", \"ti\", \"po\", \"po\", \"gu\", \"ta\", \"re\", \"li\", \"tu\", \"po\", \"pa\", \"ri\", \"fe\", \"po\", \"me\", \"ga\", \"fi\", \"su\", \"to\", \"pe\", \"sa\", \"mu\", \"pi\", \"ro\", \"go\", \"su\", \"ra\", \"fe\", \"fi\", \"ru\", \"ro\", \"ga\", \"ti\", \"le\", \"mo\", \"te\", \"ma\", \"li\", \"lu\", \"lo\", \"lu\", \"ro\", \"ti\", \"sa\", \"fe\", \"me\", \"ta\", \"su\", \"so\", \"li\", \"fo\", \"mu\", \"ga\", \"gui\", \"se\", \"me\", \"fi\", \"gu\", \"ro\", \"la\", \"si\", \"su\", \"fo\", \"pi\", \"ta\", \"ge\", \"fe\", \"sa\", \"pu\", \"go\", \"pi\", \"mo\", \"lu\", \"fa\", \"li\", \"te\", \"re\", \"si\", \"su\", \"so\", \"ra\", \"gui\", \"ru\", \"so\", \"ri\", \"ma\", \"te\", \"ge\", \"la\", \"fu\", \"po\", \"ri\", \"to\", \"fu\", \"ta\", \"ri\", \"re\", \"te\", \"gui\", \"tu\", \"mo\", \"ta\", \"mi\", \"mu\", \"po\", \"fi\", \"la\", \"le\", \"te\", \"ma\", \"gu\", \"to\", \"si\", \"po\", \"tu\", \"pa\", \"ti\", \"le\", \"le\", \"ti\", \"ru\", \"po\", \"sa\", \"li\", \"pu\", \"go\", \"mi\", \"pa\", \"me\", \"pe\", \"fa\", \"ru\", \"mo\", \"fi\", \"go\", \"ru\", \"ra\", \"si\", \"fe\", \"ge\", \"mi\", \"lu\", \"lo\", \"ga\", \"ri\", \"fu\", \"to\", \"li\", \"ra\", \"pe\", \"re\", \"pa\", \"tu\", \"ro\", \"mi\", \"so\", \"su\", \"la\", \"pi\", \"ge\", \"se\", \"pi\", \"mu\", \"fo\", \"pa\", \"pi\", \"tu\", \"lo\", \"gui\", \"fa\", \"re\", \"le\", \"ra\", \"lu\", \"lo\", \"gui\", \"lo\", \"pu\", \"ma\", \"mi\", \"pe\", \"fe\", \"ri\", \"pu\", \"to\", \"fa\", \"ti\", \"gu\", \"mo\", \"si\", \"ga\", \"se\", \"se\", \"ga\", \"mu\", \"fo\", \"ti\", \"ro\", \"gu\", \"sa\", \"fi\", \"me\", \"pe\", \"li\", \"fu\", \"go\", \"ma\", \"fi\", \"tu\", \"pa\", \"me\", \"si\", \"to\", \"le\", \"ta\", \"lu\", \"so\", \"li\", \"se\", \"sa\", \"pu\", \"po\", \"ri\", \"go\", \"pa\", \"le\", \"su\", \"ri\", \"go\", \"gu\", \"ta\", \"ge\", \"pi\", \"go\", \"ge\", \"sa\", \"pu\", \"po\", \"si\", \"te\", \"pa\", \"su\", \"so\", \"gui\", \"fo\", \"fa\", \"me\", \"tu\", \"pi\", \"so\", \"fu\", \"ra\", \"le\", \"ri\", \"ro\", \"se\", \"la\", \"ru\", \"to\", \"pi\", \"ge\", \"ta\", \"fu\", \"to\", \"ti\", \"po\", \"ga\", \"te\", \"pu\", \"ti\", \"ro\", \"ru\", \"sa\", \"te\", \"ti\", \"so\", \"pe\", \"ra\", \"fu\", \"go\", \"ti\", \"pe\", \"ga\", \"gu\", \"ro\", \"fi\", \"to\", \"ma\", \"fe\", \"gu\", \"mi\", \"mo\", \"pu\", \"ga\", \"pe\", \"gui\", \"fo\", \"fe\", \"ga\", \"mu\", \"ro\", \"fi\", \"re\", \"fa\", \"lu\", \"mo\", \"si\", \"so\", \"ra\", \"re\", \"fu\", \"gui\", \"fo\", \"su\", \"ma\", \"fe\", \"mi\", \"mo\", \"re\", \"fa\", \"su\", \"mo\", \"gui\", \"me\", \"la\", \"mu\", \"go\", \"pi\", \"lo\", \"sa\", \"se\", \"lu\", \"li\", \"po\", \"lu\", \"fa\", \"se\", \"fi\", \"po\", \"te\", \"pa\", \"tu\", \"fo\", \"mi\", \"le\", \"ra\", \"tu\", \"fo\", \"li\", \"ro\", \"ta\", \"ge\", \"mu\", \"fi\", \"to\", \"mu\", \"la\", \"re\", \"li\", \"lo\", \"me\", \"ma\", \"gu\", \"lo\", \"ri\", \"fe\", \"ma\", \"ru\", \"lo\", \"mi\", \"mo\", \"la\", \"pe\", \"ru\", \"si\", \"lo\", \"ra\", \"fi\", \"te\", \"gu\", \"lo\", \"su\", \"fe\", \"so\", \"la\", \"ti\", \"ru\", \"pa\", \"fi\", \"pe\", \"po\", \"ta\", \"ri\", \"po\", \"lu\", \"pe\", \"ro\", \"fa\", \"li\", \"me\", \"su\", \"go\", \"tu\", \"ge\", \"ro\", \"ma\", \"si\", \"tu\", \"ra\", \"gui\", \"te\", \"to\", \"fa\", \"si\", \"ro\", \"mu\", \"re\", \"po\", \"ta\", \"pi\", \"fe\", \"ru\", \"mo\", \"ru\", \"me\", \"to\", \"ta\", \"fi\", \"gu\", \"fa\", \"si\", \"ge\", \"so\", \"sa\", \"li\", \"so\", \"pu\", \"me\", \"fo\", \"ma\", \"si\", \"pe\", \"lu\", \"po\", \"fu\", \"re\", \"po\", \"fa\", \"li\", \"mu\", \"sa\", \"pi\", \"se\", \"fo\", \"ma\", \"gui\", \"mo\", \"fu\", \"fe\", \"lo\", \"pa\", \"gui\", \"re\", \"fu\", \"so\", \"gu\", \"se\", \"go\", \"sa\", \"gui\", \"su\", \"la\", \"mi\", \"le\", \"ro\", \"la\", \"mi\", \"fo\", \"su\", \"te\", \"go\", \"ga\", \"ti\", \"se\", \"pu\", \"to\", \"mu\", \"le\", \"lo\", \"ra\", \"mi\", \"lu\", \"ta\", \"ri\", \"me\", \"lo\", \"ra\", \"fi\", \"to\", \"tu\", \"ge\", \"so\", \"la\", \"ri\", \"le\", \"tu\", \"fo\", \"lu\", \"te\", \"mo\", \"ga\", \"ri\", \"pu\", \"ma\", \"li\", \"fe\", \"mo\", \"ga\", \"pi\", \"go\", \"ru\", \"se\", \"mo\", \"sa\", \"mi\", \"ge\", \"mu\", \"ro\", \"pu\", \"pe\", \"fo\", \"pa\", \"pi\", \"fu\", \"ga\", \"ti\", \"re\", \"go\", \"pa\", \"ti\", \"lo\", \"gu\", \"le\", \"to\", \"ru\", \"ra\", \"po\", \"se\", \"pi\", \"te\", \"so\", \"ri\", \"lu\", \"sa\", \"pu\", \"si\", \"fa\", \"go\", \"se\", \"me\", \"su\", \"li\", \"la\", \"mo\", \"ta\", \"pu\", \"ga\", \"ro\", \"re\", \"ri\", \"se\", \"fo\", \"li\", \"tu\", \"fa\", \"mu\", \"pi\", \"pa\", \"mo\", \"te\", \"ge\", \"pu\", \"gui\", \"ga\", \"so\", \"ga\", \"lu\", \"sa\", \"fo\", \"fe\", \"li\", \"pe\", \"lo\", \"ti\", \"su\", \"ga\", \"ru\", \"li\", \"sa\", \"ro\", \"me\", \"fe\", \"lu\", \"fi\", \"pa\", \"ro\", \"la\", \"mu\", \"fa\", \"lo\", \"ge\", \"ti\", \"re\", \"ro\", \"fi\", \"gu\", \"ma\", \"fu\", \"ri\", \"ma\", \"so\", \"le\", \"se\", \"gu\", \"ti\", \"ma\", \"go\", \"ma\", \"tu\", \"pa\", \"go\", \"le\", \"gui\", \"me\", \"go\", \"gui\", \"ru\", \"ra\", \"su\", \"mi\", \"ra\", \"fo\", \"fe\", \"re\", \"fu\", \"mi\", \"fa\", \"po\", \"fa\", \"fu\", \"ma\", \"so\", \"te\", \"mi\", \"fe\", \"mo\", \"pi\", \"pu\", \"pa\", \"gu\", \"gui\", \"ta\", \"lo\", \"ge\", \"pe\", \"tu\", \"pi\", \"ra\", \"lo\", \"ra\", \"su\", \"la\", \"mo\", \"pe\", \"fi\", \"le\", \"to\", \"si\", \"mu\", \"la\", \"tu\", \"fi\", \"la\", \"po\", \"re\", \"le\", \"mu\", \"si\", \"sa\", \"fo\", \"sa\", \"gu\", \"ta\", \"to\", \"me\", \"si\", \"ge\", \"po\", \"mi\", \"fu\", \"ta\", \"lu\", \"ti\", \"ga\", \"to\", \"pe\", \"te\", \"ru\", \"ri\", \"ta\", \"to\", \"pa\", \"ge\", \"ro\", \"pa\", \"ri\", \"su\", \"mo\", \"mi\", \"la\", \"pu\", \"pe\", \"ta\", \"tu\", \"pe\", \"mo\", \"gui\", \"ri\", \"le\", \"ro\", \"mu\", \"ga\", \"to\", \"pe\", \"to\", \"ta\", \"li\", \"ru\", \"ro\", \"pi\", \"fa\", \"su\", \"se\", \"pa\", \"fu\", \"me\", \"so\", \"ti\", \"si\", \"se\", \"to\", \"fu\", \"la\", \"fo\", \"fe\", \"go\", \"ra\", \"ti\", \"fu\", \"po\", \"ti\", \"pa\", \"lu\", \"fe\", \"fa\", \"lu\", \"ge\", \"fo\", \"ri\", \"gui\", \"pe\", \"go\", \"tu\", \"sa\", \"po\", \"me\", \"lo\", \"la\", \"mi\", \"mu\", \"to\", \"fi\", \"ra\", \"tu\", \"le\", \"ga\", \"mu\", \"te\", \"lo\", \"si\", \"li\", \"re\", \"mo\", \"su\", \"ra\", \"so\", \"te\", \"so\", \"sa\", \"gui\", \"gu\", \"so\", \"gui\", \"ga\", \"mu\", \"te\", \"la\", \"ru\", \"fe\", \"po\", \"mi\", \"ti\", \"fe\", \"so\", \"ru\", \"fa\", \"ro\", \"se\", \"fo\", \"ga\", \"pi\", \"lu\", \"lo\", \"ri\", \"sa\", \"gu\", \"re\", \"sa\", \"gu\", \"se\", \"go\", \"pi\", \"pi\", \"ge\", \"po\", \"pu\", \"ma\", \"lo\", \"re\", \"mo\", \"ma\", \"fi\", \"pu\", \"go\", \"li\", \"ma\", \"ru\", \"ge\", \"ra\", \"pu\", \"le\", \"ro\", \"fi\", \"mi\", \"me\", \"lo\", \"lu\", \"pa\", \"go\", \"le\", \"po\", \"fa\", \"si\", \"tu\", \"fo\", \"si\", \"ta\", \"fu\", \"me\", \"ma\", \"su\", \"re\", \"to\", \"li\", \"fi\", \"te\", \"fo\", \"gu\", \"ta\", \"mo\", \"fu\", \"li\", \"fo\", \"se\", \"ra\", \"ti\", \"sa\", \"mu\", \"pe\", \"fo\", \"go\", \"ga\", \"mi\", \"fe\", \"ru\", \"to\", \"pe\", \"mu\", \"ta\", \"si\", \"so\", \"pu\", \"mi\", \"po\", \"ge\", \"ga\", \"ri\", \"ra\", \"tu\", \"fe\", \"lo\", \"ro\", \"fa\", \"si\", \"pe\", \"gu\", \"ro\", \"ge\", \"pu\", \"pa\", \"ri\", \"mo\", \"mu\", \"pi\", \"mo\", \"me\", \"la\", \"gui\", \"fa\", \"gu\", \"te\", \"to\", \"to\", \"ra\", \"li\", \"te\", \"fu\", \"go\", \"fe\", \"fu\", \"ra\", \"mi\", \"ro\", \"lu\", \"ti\", \"so\", \"le\", \"pa\", \"li\", \"ta\", \"su\", \"se\", \"go\", \"mo\", \"ta\", \"gui\", \"se\", \"su\", \"lo\", \"re\", \"lu\", \"fa\", \"li\", \"po\", \"su\", \"fi\", \"lo\", \"te\", \"fa\", \"mi\", \"pa\", \"ru\", \"ge\", \"ro\", \"lo\", \"la\", \"fi\", \"me\", \"tu\", \"so\", \"me\", \"ru\", \"sa\", \"ti\", \"to\", \"ru\", \"si\", \"ro\", \"fe\", \"ta\", \"fi\", \"ma\", \"pu\", \"re\", \"po\", \"po\", \"ma\", \"pi\", \"ge\", \"lu\", \"fo\", \"te\", \"tu\", \"ga\", \"fi\", \"lo\", \"gu\", \"gui\", \"go\", \"re\", \"sa\", \"pi\", \"ga\", \"fu\", \"le\", \"so\", \"fo\", \"pa\", \"ti\", \"le\", \"pu\", \"po\", \"se\", \"su\", \"la\", \"gui\", \"go\", \"tu\", \"ri\", \"to\", \"pe\", \"ma\", \"si\", \"la\", \"lu\", \"me\", \"mo\", \"so\", \"sa\", \"ri\", \"re\", \"mu\", \"mo\", \"le\", \"gu\", \"ma\", \"pi\", \"fo\", \"pa\", \"le\", \"tu\", \"mo\", \"fi\", \"pi\", \"la\", \"fu\", \"ge\", \"mo\", \"pa\", \"to\", \"si\", \"fu\", \"re\", \"fu\", \"fe\", \"sa\", \"pi\", \"so\", \"pe\", \"la\", \"pe\", \"su\", \"po\", \"ti\", \"mi\", \"pa\", \"tu\", \"pe\", \"po\", \"ga\", \"so\", \"fi\", \"ru\", \"le\", \"su\", \"le\", \"la\", \"ti\", \"po\", \"se\", \"ma\", \"fe\", \"mu\", \"ro\", \"si\", \"ri\", \"ga\", \"ru\", \"re\", \"fo\", \"ma\", \"fo\", \"ti\", \"gu\", \"fe\", \"pu\", \"me\", \"fa\", \"si\", \"go\", \"ge\", \"sa\", \"me\", \"lu\", \"so\", \"gui\", \"si\", \"ra\", \"su\", \"te\", \"lo\", \"ra\", \"mo\", \"mi\", \"pu\", \"ge\", \"tu\", \"te\", \"ta\", \"mi\", \"lo\", \"te\", \"fa\", \"re\", \"fu\", \"go\", \"mi\", \"gui\", \"fa\", \"lu\", \"fe\", \"go\", \"la\", \"ro\", \"gui\", \"lu\", \"te\", \"mu\", \"pe\", \"ga\", \"gui\", \"mo\", \"re\", \"ga\", \"te\", \"pu\", \"fo\", \"ri\", \"li\", \"sa\", \"pu\", \"le\", \"ro\", \"fa\", \"po\", \"ri\", \"mu\", \"se\", \"ru\", \"ge\", \"pa\", \"ri\", \"ro\", \"le\", \"ta\", \"se\", \"gu\", \"lo\", \"li\", \"ti\", \"ma\", \"gu\", \"se\", \"to\", \"sa\", \"lo\", \"pi\", \"su\", \"pe\", \"gu\", \"se\", \"ra\", \"li\", \"fo\", \"fe\", \"ra\", \"ge\", \"ru\", \"to\", \"pi\", \"fi\", \"ta\", \"mu\", \"me\", \"so\", \"ta\", \"go\", \"li\", \"tu\", \"me\", \"lu\", \"re\", \"ma\", \"fi\", \"to\", \"me\", \"mo\", \"ru\", \"pa\", \"pe\", \"fi\", \"fu\", \"ri\", \"to\", \"me\", \"ga\", \"pi\", \"po\", \"pe\", \"ru\", \"fa\", \"se\", \"ru\", \"pa\", \"ri\", \"go\", \"pa\", \"lo\", \"tu\", \"fa\", \"le\", \"si\", \"pu\", \"pi\", \"ro\", \"ge\", \"ma\", \"ri\", \"lo\", \"re\", \"tu\", \"ga\", \"te\", \"gu\", \"ra\", \"ti\", \"mo\", \"la\", \"to\", \"pu\", \"sa\", \"te\", \"mi\", \"mu\", \"ti\", \"go\", \"re\", \"ra\", \"ti\", \"to\", \"te\", \"pu\", \"ma\", \"le\", \"tu\", \"fa\", \"mi\", \"to\", \"ta\", \"ro\", \"gu\", \"ga\", \"se\", \"ti\", \"ru\", \"mi\", \"mo\", \"le\", \"la\", \"si\", \"ro\", \"le\", \"fu\", \"la\", \"fe\", \"su\", \"ta\", \"si\", \"lo\", \"ma\", \"fo\", \"mu\", \"la\", \"ge\", \"ri\", \"su\", \"gui\", \"po\", \"pe\", \"sa\", \"mi\", \"so\", \"ge\", \"mu\", \"ra\", \"me\", \"fu\", \"ga\", \"li\", \"so\", \"fa\", \"go\", \"fu\", \"ta\", \"me\", \"gui\", \"tu\", \"si\", \"so\", \"te\", \"fa\", \"li\", \"mo\", \"fe\", \"lu\", \"ta\", \"pe\", \"mu\", \"la\", \"fi\", \"ro\", \"ga\", \"po\", \"lu\", \"ma\", \"fe\", \"pi\", \"lu\", \"li\", \"lo\", \"se\", \"ta\", \"gui\", \"fo\", \"me\", \"su\", \"pa\", \"ge\", \"pu\", \"ma\", \"pi\", \"fo\", \"ra\", \"so\", \"su\", \"ra\", \"re\", \"li\", \"gu\", \"fi\", \"fo\", \"fe\", \"pa\", \"fi\", \"go\", \"se\", \"gu\", \"sa\", \"re\", \"lu\", \"sa\", \"gui\", \"po\", \"sa\", \"tu\", \"gui\", \"to\", \"ga\", \"me\", \"ma\", \"so\", \"ti\", \"su\", \"me\", \"sa\", \"go\", \"ru\", \"fi\", \"le\", \"pu\", \"le\", \"ta\", \"si\", \"go\", \"fo\", \"ru\", \"ti\", \"ro\", \"ma\", \"fe\", \"fa\", \"po\", \"ri\", \"ru\", \"te\", \"ga\", \"ro\", \"gu\", \"mi\", \"te\", \"tu\", \"re\", \"ra\", \"mi\", \"mo\", \"to\", \"gu\", \"li\", \"fo\", \"ta\", \"re\", \"ta\", \"mo\", \"fi\", \"mu\", \"re\", \"ta\", \"po\", \"tu\", \"pi\", \"me\", \"lu\", \"pe\", \"sa\", \"pi\", \"ro\", \"po\", \"pu\", \"pi\", \"mo\", \"sa\", \"le\", \"sa\", \"to\", \"si\", \"lu\", \"se\", \"fa\", \"mo\", \"lu\", \"ti\", \"pe\", \"fu\", \"se\", \"fa\", \"ri\", \"lo\", \"lo\", \"su\", \"ri\", \"so\", \"pa\", \"ge\", \"pa\", \"fo\", \"pi\", \"tu\", \"pe\", \"pa\", \"so\", \"mu\", \"ri\", \"ge\", \"ru\", \"fe\", \"ma\", \"li\", \"fo\", \"ro\", \"fu\", \"si\", \"po\", \"fa\", \"pe\", \"la\", \"go\", \"li\", \"fu\", \"ge\", \"la\", \"fo\", \"fu\", \"gui\", \"fe\", \"mu\", \"me\", \"pa\", \"ti\", \"to\", \"go\", \"lu\", \"fi\", \"go\", \"ra\", \"te\", \"ga\", \"ro\", \"gui\", \"gu\", \"le\", \"ra\", \"lo\", \"su\", \"si\", \"se\", \"gu\", \"te\", \"la\", \"gui\", \"so\", \"so\", \"mu\", \"mi\", \"lo\", \"la\", \"se\", \"ra\", \"lo\", \"mi\", \"pu\", \"fe\", \"ma\", \"to\", \"pu\", \"li\", \"re\", \"su\", \"ge\", \"ga\", \"fi\", \"po\", \"mo\", \"so\", \"tu\", \"re\", \"mi\", \"ra\", \"su\", \"pa\", \"to\", \"fi\", \"le\", \"li\", \"mu\", \"fa\", \"me\", \"mo\", \"fo\", \"pi\", \"se\", \"ma\", \"fu\", \"ru\", \"po\", \"su\", \"me\", \"ti\", \"fa\", \"lu\", \"ma\", \"lo\", \"ri\", \"se\", \"gui\", \"tu\", \"ra\", \"te\", \"to\", \"to\", \"fi\", \"re\", \"fa\", \"gu\", \"lu\", \"lo\", \"gu\", \"le\", \"li\", \"sa\", \"fu\", \"la\", \"so\", \"si\", \"re\", \"pi\", \"su\", \"ma\", \"ge\", \"ro\", \"po\", \"gui\", \"le\", \"ra\", \"ru\", \"mu\", \"fo\", \"ru\", \"pe\", \"pi\", \"la\", \"gu\", \"sa\", \"go\", \"pi\", \"pe\", \"ti\", \"gu\", \"sa\", \"re\", \"fo\", \"lo\", \"ri\", \"pe\", \"ta\", \"pu\", \"su\", \"to\", \"pu\", \"se\", \"fi\", \"ta\", \"ru\", \"ta\", \"fo\", \"li\", \"ge\", \"fi\", \"fu\", \"ga\", \"le\", \"po\", \"ro\", \"ti\", \"me\", \"ga\", \"lu\", \"fu\", \"go\", \"fu\", \"fe\", \"gui\", \"pa\", \"mu\", \"fa\", \"ro\", \"ti\", \"me\", \"si\", \"lu\", \"ta\", \"fe\", \"so\", \"go\", \"li\", \"fe\", \"la\", \"su\", \"tu\", \"mo\", \"lu\", \"te\", \"ri\", \"ma\", \"tu\", \"ga\", \"po\", \"gui\", \"te\", \"ri\", \"ru\", \"la\", \"se\", \"lo\", \"so\", \"mi\", \"ge\", \"pa\", \"mu\", \"gu\", \"ro\", \"mu\", \"ge\", \"si\", \"ga\", \"pu\", \"ra\", \"mo\", \"mi\", \"fe\", \"mi\", \"pu\", \"pa\", \"pe\", \"go\", \"mo\", \"si\", \"te\", \"sa\", \"tu\", \"pu\", \"ro\", \"te\", \"ti\", \"sa\", \"su\", \"fi\", \"pu\", \"ta\", \"to\", \"me\", \"lo\", \"mi\", \"ge\", \"ru\", \"ga\", \"pi\", \"ra\", \"po\", \"se\", \"gu\", \"ro\", \"go\", \"pe\", \"ri\", \"ga\", \"ru\", \"gui\", \"mu\", \"ma\", \"po\", \"te\", \"so\", \"si\", \"pe\", \"mu\", \"pa\", \"ti\", \"ta\", \"so\", \"le\", \"tu\", \"to\", \"lo\", \"fe\", \"fi\", \"la\", \"lu\", \"si\", \"su\", \"fa\", \"ro\", \"re\", \"mo\", \"li\", \"me\", \"gu\", \"ta\", \"li\", \"ma\", \"fo\", \"re\", \"mu\", \"lo\", \"mo\", \"ge\", \"gui\", \"ra\", \"fu\", \"ri\", \"tu\", \"ga\", \"lo\", \"pe\", \"fo\", \"ti\", \"le\", \"lu\", \"fa\", \"gui\", \"sa\", \"go\", \"ge\", \"su\", \"fo\", \"to\", \"le\", \"pi\", \"ta\", \"tu\", \"ti\", \"gu\", \"ra\", \"fo\", \"ge\", \"to\", \"fi\", \"re\", \"fu\", \"ma\", \"fi\", \"la\", \"to\", \"me\", \"ru\", \"go\", \"so\", \"se\", \"mi\", \"pa\", \"gu\", \"mi\", \"ru\", \"sa\", \"so\", \"le\", \"ro\", \"ri\", \"se\", \"pu\", \"sa\", \"mi\", \"pa\", \"mo\", \"fe\", \"pu\", \"mo\", \"po\", \"re\", \"si\", \"fa\", \"pu\", \"li\", \"lu\", \"la\", \"mo\", \"fe\", \"go\", \"gui\", \"te\", \"tu\", \"ra\", \"si\", \"fa\", \"ro\", \"pe\", \"fu\", \"po\", \"fo\", \"me\", \"li\", \"ma\", \"mu\", \"pi\", \"fu\", \"pa\", \"go\", \"se\", \"po\", \"pi\", \"fe\", \"su\", \"la\", \"ri\", \"ga\", \"lo\", \"te\", \"lu\", \"so\", \"ri\", \"fa\", \"fu\", \"te\", \"lo\", \"mi\", \"ra\", \"te\", \"lo\", \"su\", \"ga\", \"lo\", \"fe\", \"gui\", \"tu\", \"pe\", \"pu\", \"fo\", \"ti\", \"sa\", \"si\", \"si\", \"ra\", \"tu\", \"ge\", \"mo\", \"pi\", \"la\", \"le\", \"mo\", \"tu\", \"ma\", \"ro\", \"pe\", \"li\", \"gu\", \"ge\", \"fu\", \"lo\", \"fi\", \"fa\", \"fi\", \"ti\", \"ta\", \"su\", \"fe\", \"so\", \"ri\", \"fa\", \"re\", \"po\", \"mu\", \"fa\", \"fo\", \"le\", \"pi\", \"lu\", \"fe\", \"su\", \"ro\", \"mi\", \"pa\", \"pi\", \"fi\", \"ga\", \"mu\", \"se\", \"fo\", \"ti\", \"sa\", \"se\", \"so\", \"gu\", \"pa\", \"so\", \"re\", \"si\", \"ru\", \"re\", \"tu\", \"mo\", \"li\", \"la\", \"ri\", \"pi\", \"pa\", \"lu\", \"le\", \"po\", \"li\", \"ma\", \"me\", \"fo\", \"fu\", \"ta\", \"to\", \"ge\", \"fi\", \"fu\", \"te\", \"ru\", \"po\", \"gui\", \"ga\", \"mi\", \"li\", \"sa\", \"pu\", \"pe\", \"to\", \"si\", \"ta\", \"fe\", \"to\", \"ru\", \"la\", \"mo\", \"me\", \"mi\", \"pu\", \"le\", \"mu\", \"to\", \"si\", \"ma\", \"li\", \"mi\", \"ma\", \"gu\", \"me\", \"ro\", \"fi\", \"ga\", \"ge\", \"ro\", \"pu\", \"ra\", \"go\", \"te\", \"ti\", \"su\", \"se\", \"lu\", \"go\", \"pi\", \"ra\", \"ti\", \"gui\", \"la\", \"ru\", \"re\", \"go\", \"gui\", \"pa\", \"pe\", \"go\", \"lu\", \"sa\", \"po\", \"se\", \"ri\", \"mu\", \"me\", \"gu\", \"so\", \"ri\", \"ta\", \"gui\"]\n\nsyllables = [\"toh\"]\n\nunique_syllables = list(dict.fromkeys(syllables))\n\nfor x in unique_syllables:\n tts = gTTS(x, lang='es')\n filename = \"%s.mp3\" % x\n tts.save(filename)\n\n",
"from gtts import *\nsyllables = ['toh']\nunique_syllables = list(dict.fromkeys(syllables))\nfor x in unique_syllables:\n tts = gTTS(x, lang='es')\n filename = '%s.mp3' % x\n tts.save(filename)\n",
"<import token>\nsyllables = ['toh']\nunique_syllables = list(dict.fromkeys(syllables))\nfor x in unique_syllables:\n tts = gTTS(x, lang='es')\n filename = '%s.mp3' % x\n tts.save(filename)\n",
"<import token>\n<assignment token>\nfor x in unique_syllables:\n tts = gTTS(x, lang='es')\n filename = '%s.mp3' % x\n tts.save(filename)\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
99,067 |
922604603455919976ba8f7452490a979b1d5db9
|
#!/usr/bin/env python
import feedparser
import Tkinter
import ttk
import tkMessageBox
import tkHyperlinkManager
import webbrowser
class App(ttk.Frame):
def __init__(self, parent):
self.root = parent
ttk.Frame.__init__(self, parent)
self.parent = parent
self.feedslist = []
self.feedstitles = []
self.readFeedsFromFile()
self.initUI()
def initUI(self):
self.currentarticleindex = 0
self.menu = Tkinter.Menu(self.parent)
self.root.config(menu = self.menu)
self.fileMenu = Tkinter.Menu(self.menu)
self.menu.add_cascade(label="File", menu = self.fileMenu)
self.fileMenu.add_command(label="Import new feed...", command=self.hello)
self.fileMenu.add_separator()
self.fileMenu.add_command(label="Exit", command=self.root.destroy)
self.parent.title("KLOPyRSSReader2013")
self.pack(fill = Tkinter.BOTH, expand=1)
self.style = ttk.Style()
self.root.minsize(600,400)
ttk.Style.configure(self.style, "TFrame", background="#333")
self.lb = Tkinter.Listbox(self, width=36)
#bind doubleclick to onDouble-function
self.lb.bind("<Double-Button-1>", self.onDouble)
self.lb.grid(row = 0, column = 0, sticky=Tkinter.NW)
self.textbox = Tkinter.Text(self, width=48, wrap=Tkinter.WORD, state=Tkinter.DISABLED)
self.textbox.grid(row = 0, column = 1, columnspan=4, rowspan=4, sticky=Tkinter.NW)
#self.textbox.insert(0.0, "Hello world!")
self.hyperlinkManager = tkHyperlinkManager.HyperlinkManager(self.textbox)
self.currentfeed = ""
self.currentfeedarticles = "(N/A)"
self.previousbutton = Tkinter.Button(self, text="Previous article", command=self.previousButtonClick)
self.previousbutton.grid(row=4, column=1, sticky=Tkinter.W)
self.label = Tkinter.Label(self, text =" ")
self.label.grid(row=4, column=2)
self.nextbutton = Tkinter.Button(self, text="Next article", command=self.nextButtonClick)
self.nextbutton.grid(row=4, column=4, sticky=Tkinter.E)
self.deletefeedbutton = Tkinter.Button(self, text="Delete selcted feed", command = lambda: self.deleteFeed(self.lb.curselection()[0]))
self.deletefeedbutton.grid(row = 4, column = 0, sticky = Tkinter.W)
for i in self.feedslist:
d = feedparser.parse(i)
self.feedstitles.append(d.feed.title)
#when inserting feeds into the listbox, use the user-given name instead of URL
#we could also use the title of the feed, maybe that's even better
for i in self.feedstitles:
self.lb.insert(Tkinter.END, i)
def hello(self):
top = self.top = Tkinter.Toplevel(self)
Tkinter.Label(top, text="URL:").pack()
self.e = Tkinter.Entry(top, text="default")
self.e.pack(padx=5)
b = Tkinter.Button(top, text="OK", command=self.ok)
c = Tkinter.Button(top, text="Cancel", command = self.cancel)
b.pack()
c.pack()
def ok(self):
feed = self.e.get()
print "value is " + feed
self.insertFeedToFile(feed)
self.top.destroy()
def cancel(self):
self.top.destroy()
def readFeedsFromFile(self):
with open("feeds.txt") as f:
for line in f.readlines():
string = line.lstrip()
if (string[0] == '#'):
continue
self.feedslist.append(string)
def deleteFeed(self, index):
indx = int(index)
url = self.feedslist.pop(indx)
self.feedstitles.pop(indx)
f = open("feeds.txt", "r")
lines = f.readlines()
f.close()
f = open("feeds.txt", "w")
for line in lines:
if line != url:
f.write(line)
f.close()
self.refreshFeedsList()
def insertFeedToFile(self, feed):
self.feedslist.append(feed)
with open("feeds.txt", "a") as f:
f.write(feed)
self.refreshFeedsList()
def refreshFeedsList(self):
self.lb.delete(0, Tkinter.END)
self.feedstitles = []
for i in self.feedslist:
d = feedparser.parse(i)
self.feedstitles.append(d.feed.title)
for i in self.feedstitles:
self.lb.insert(Tkinter.END, i)
#refreshes the feed by selecting it and sets the current index to 0
def onDouble(self, event):
self.currentarticleindex = 0
widget = event.widget
selection = widget.curselection()
index = selection[0]
self.currentfeed = feedparser.parse(self.feedslist[int(index)])
self.currentfeedarticles = len(self.currentfeed.entries)
self.label.config(text = str(self.currentarticleindex + 1) + "/" + str(self.currentfeedarticles))
self.loadArticle()
def linkClick(self, link):
webbrowser.open(link)
def loadArticle(self):
#hack to fix if we somehow read beyond the articles that exist in the feed
if self.currentarticleindex + 1 > len(self.currentfeed.entries):
self.currentarticleindex = len(self.currentfeed.entries) - 1
title = self.currentfeed.entries[self.currentarticleindex].title
link = self.currentfeed.entries[self.currentarticleindex].link
description = self.currentfeed.entries[self.currentarticleindex].description
description = description.replace("’", "\'").replace("–", "-")
self.textbox.config(state=Tkinter.NORMAL)
self.textbox.delete(1.0, Tkinter.END)
self.textbox.insert(Tkinter.END, title, self.hyperlinkManager.add(lambda: self.linkClick(link)))
self.textbox.insert(Tkinter.END, "\r\n\r\n")
self.textbox.insert(Tkinter.END, description)
self.textbox.config(state=Tkinter.DISABLED)
def nextButtonClick(self):
self.currentarticleindex = self.currentarticleindex + 1
self.label.config(text = str(self.currentarticleindex + 1) + "/" + str(self.currentfeedarticles))
self.loadArticle()
def previousButtonClick(self):
if self.currentarticleindex == 0:
return
else:
self.currentarticleindex = self.currentarticleindex - 1
self.loadArticle()
self.label.config(text = str(self.currentarticleindex + 1) + "/" + str(self.currentfeedarticles))
if __name__ == "__main__":
#d = feedparser.parse(feedsList[0])
#print "Title: " + d.entries[0].title
#print "Link: " + d.entries[0].link
#print "Desc: " + d.entries[0].description.replace("’", "\'").replace("&", "&")
#print "Published: " + d.entries[0].published
#print "Updated: " + d.entries[0].updated
#print "Id " + d.entries[0].id
root = Tkinter.Tk()
app = App(root)
root.mainloop()
|
[
"#!/usr/bin/env python\n\nimport feedparser\nimport Tkinter\nimport ttk\nimport tkMessageBox\nimport tkHyperlinkManager\nimport webbrowser\n\nclass App(ttk.Frame):\n def __init__(self, parent):\n self.root = parent\n ttk.Frame.__init__(self, parent)\n self.parent = parent\n self.feedslist = []\n self.feedstitles = []\n self.readFeedsFromFile()\n self.initUI()\n \n def initUI(self):\n self.currentarticleindex = 0\n self.menu = Tkinter.Menu(self.parent)\n self.root.config(menu = self.menu)\n self.fileMenu = Tkinter.Menu(self.menu)\n self.menu.add_cascade(label=\"File\", menu = self.fileMenu)\n self.fileMenu.add_command(label=\"Import new feed...\", command=self.hello)\n self.fileMenu.add_separator()\n self.fileMenu.add_command(label=\"Exit\", command=self.root.destroy)\n self.parent.title(\"KLOPyRSSReader2013\")\n self.pack(fill = Tkinter.BOTH, expand=1)\n self.style = ttk.Style()\n self.root.minsize(600,400)\n ttk.Style.configure(self.style, \"TFrame\", background=\"#333\")\n \n self.lb = Tkinter.Listbox(self, width=36)\n #bind doubleclick to onDouble-function\n self.lb.bind(\"<Double-Button-1>\", self.onDouble)\n self.lb.grid(row = 0, column = 0, sticky=Tkinter.NW)\n \n self.textbox = Tkinter.Text(self, width=48, wrap=Tkinter.WORD, state=Tkinter.DISABLED)\n self.textbox.grid(row = 0, column = 1, columnspan=4, rowspan=4, sticky=Tkinter.NW)\n #self.textbox.insert(0.0, \"Hello world!\")\n\n self.hyperlinkManager = tkHyperlinkManager.HyperlinkManager(self.textbox)\n\n self.currentfeed = \"\"\n self.currentfeedarticles = \"(N/A)\"\n\n self.previousbutton = Tkinter.Button(self, text=\"Previous article\", command=self.previousButtonClick)\n self.previousbutton.grid(row=4, column=1, sticky=Tkinter.W)\n\n self.label = Tkinter.Label(self, text =\" \")\n self.label.grid(row=4, column=2)\n\n self.nextbutton = Tkinter.Button(self, text=\"Next article\", command=self.nextButtonClick)\n self.nextbutton.grid(row=4, column=4, sticky=Tkinter.E)\n\n self.deletefeedbutton = Tkinter.Button(self, text=\"Delete selcted feed\", command = lambda: self.deleteFeed(self.lb.curselection()[0]))\n self.deletefeedbutton.grid(row = 4, column = 0, sticky = Tkinter.W)\n\n for i in self.feedslist:\n d = feedparser.parse(i)\n self.feedstitles.append(d.feed.title)\n\n #when inserting feeds into the listbox, use the user-given name instead of URL\n #we could also use the title of the feed, maybe that's even better\n for i in self.feedstitles:\n self.lb.insert(Tkinter.END, i)\n\n def hello(self):\n top = self.top = Tkinter.Toplevel(self)\n\n Tkinter.Label(top, text=\"URL:\").pack()\n \n self.e = Tkinter.Entry(top, text=\"default\")\n self.e.pack(padx=5)\n\n b = Tkinter.Button(top, text=\"OK\", command=self.ok)\n c = Tkinter.Button(top, text=\"Cancel\", command = self.cancel)\n b.pack()\n c.pack()\n\n def ok(self):\n feed = self.e.get()\n print \"value is \" + feed\n \n self.insertFeedToFile(feed)\n\n self.top.destroy()\n\n def cancel(self):\n self.top.destroy()\n \n def readFeedsFromFile(self):\n with open(\"feeds.txt\") as f:\n for line in f.readlines():\n string = line.lstrip()\n if (string[0] == '#'):\n continue\n self.feedslist.append(string)\n\n def deleteFeed(self, index):\n indx = int(index)\n url = self.feedslist.pop(indx)\n self.feedstitles.pop(indx)\n\n f = open(\"feeds.txt\", \"r\")\n lines = f.readlines()\n f.close()\n \n\n f = open(\"feeds.txt\", \"w\")\n for line in lines:\n if line != url:\n f.write(line)\n f.close()\n\n self.refreshFeedsList()\n\n def insertFeedToFile(self, feed):\n self.feedslist.append(feed)\n with open(\"feeds.txt\", \"a\") as f:\n f.write(feed)\n\n self.refreshFeedsList()\n\n def refreshFeedsList(self):\n self.lb.delete(0, Tkinter.END)\n self.feedstitles = []\n for i in self.feedslist:\n d = feedparser.parse(i)\n self.feedstitles.append(d.feed.title)\n\n for i in self.feedstitles:\n self.lb.insert(Tkinter.END, i)\n \n #refreshes the feed by selecting it and sets the current index to 0\n def onDouble(self, event):\n self.currentarticleindex = 0\n widget = event.widget\n selection = widget.curselection()\n index = selection[0]\n self.currentfeed = feedparser.parse(self.feedslist[int(index)])\n self.currentfeedarticles = len(self.currentfeed.entries)\n self.label.config(text = str(self.currentarticleindex + 1) + \"/\" + str(self.currentfeedarticles))\n self.loadArticle()\n\n\n def linkClick(self, link):\n webbrowser.open(link)\n\n def loadArticle(self):\n #hack to fix if we somehow read beyond the articles that exist in the feed\n if self.currentarticleindex + 1 > len(self.currentfeed.entries):\n self.currentarticleindex = len(self.currentfeed.entries) - 1\n\n title = self.currentfeed.entries[self.currentarticleindex].title\n link = self.currentfeed.entries[self.currentarticleindex].link\n description = self.currentfeed.entries[self.currentarticleindex].description\n description = description.replace(\"’\", \"\\'\").replace(\"–\", \"-\")\n self.textbox.config(state=Tkinter.NORMAL)\n self.textbox.delete(1.0, Tkinter.END)\n self.textbox.insert(Tkinter.END, title, self.hyperlinkManager.add(lambda: self.linkClick(link)))\n self.textbox.insert(Tkinter.END, \"\\r\\n\\r\\n\")\n self.textbox.insert(Tkinter.END, description)\n self.textbox.config(state=Tkinter.DISABLED)\n\n def nextButtonClick(self):\n self.currentarticleindex = self.currentarticleindex + 1\n self.label.config(text = str(self.currentarticleindex + 1) + \"/\" + str(self.currentfeedarticles))\n self.loadArticle()\n\n def previousButtonClick(self):\n if self.currentarticleindex == 0:\n return\n else:\n self.currentarticleindex = self.currentarticleindex - 1\n self.loadArticle()\n self.label.config(text = str(self.currentarticleindex + 1) + \"/\" + str(self.currentfeedarticles))\n\nif __name__ == \"__main__\":\n #d = feedparser.parse(feedsList[0])\n #print \"Title: \" + d.entries[0].title\n #print \"Link: \" + d.entries[0].link\n #print \"Desc: \" + d.entries[0].description.replace(\"’\", \"\\'\").replace(\"&\", \"&\")\n #print \"Published: \" + d.entries[0].published\n #print \"Updated: \" + d.entries[0].updated\n #print \"Id \" + d.entries[0].id\n root = Tkinter.Tk()\n app = App(root)\n root.mainloop()\n\n"
] | true |
99,068 |
90e58011d1fa8a92c44a559e3f027bf8dad12600
|
import pyglet
pyglet.resource.path = ['resources']
pyglet.resource.reindex()
player_image = pyglet.resource.image("player.png")
bullet_image = pyglet.resource.image("bullet.png")
asteroid_image = pyglet.resource.image("asteroid.png")
def center_image(image):
"""Sets an image's anchor point to its center"""
image.anchor_x = image.width/2
image.anchor_y = image.height/2
center_image(player_image)
center_image(bullet_image)
center_image(asteroid_image)
score_label = pyglet.text.Label(text="Score: 0", x=10, y=575)
level_label = pyglet.text.Label(text="My Amazing Game",
x=400, y=575, anchor_x='center')
player_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300)
|
[
"import pyglet\npyglet.resource.path = ['resources']\npyglet.resource.reindex()\n\nplayer_image = pyglet.resource.image(\"player.png\")\nbullet_image = pyglet.resource.image(\"bullet.png\")\nasteroid_image = pyglet.resource.image(\"asteroid.png\")\n\ndef center_image(image):\n \"\"\"Sets an image's anchor point to its center\"\"\"\n image.anchor_x = image.width/2\n image.anchor_y = image.height/2\n\ncenter_image(player_image)\ncenter_image(bullet_image)\ncenter_image(asteroid_image)\n\nscore_label = pyglet.text.Label(text=\"Score: 0\", x=10, y=575)\nlevel_label = pyglet.text.Label(text=\"My Amazing Game\", \n x=400, y=575, anchor_x='center')\n\nplayer_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300)",
"import pyglet\npyglet.resource.path = ['resources']\npyglet.resource.reindex()\nplayer_image = pyglet.resource.image('player.png')\nbullet_image = pyglet.resource.image('bullet.png')\nasteroid_image = pyglet.resource.image('asteroid.png')\n\n\ndef center_image(image):\n \"\"\"Sets an image's anchor point to its center\"\"\"\n image.anchor_x = image.width / 2\n image.anchor_y = image.height / 2\n\n\ncenter_image(player_image)\ncenter_image(bullet_image)\ncenter_image(asteroid_image)\nscore_label = pyglet.text.Label(text='Score: 0', x=10, y=575)\nlevel_label = pyglet.text.Label(text='My Amazing Game', x=400, y=575,\n anchor_x='center')\nplayer_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300)\n",
"<import token>\npyglet.resource.path = ['resources']\npyglet.resource.reindex()\nplayer_image = pyglet.resource.image('player.png')\nbullet_image = pyglet.resource.image('bullet.png')\nasteroid_image = pyglet.resource.image('asteroid.png')\n\n\ndef center_image(image):\n \"\"\"Sets an image's anchor point to its center\"\"\"\n image.anchor_x = image.width / 2\n image.anchor_y = image.height / 2\n\n\ncenter_image(player_image)\ncenter_image(bullet_image)\ncenter_image(asteroid_image)\nscore_label = pyglet.text.Label(text='Score: 0', x=10, y=575)\nlevel_label = pyglet.text.Label(text='My Amazing Game', x=400, y=575,\n anchor_x='center')\nplayer_ship = pyglet.sprite.Sprite(img=player_image, x=400, y=300)\n",
"<import token>\n<assignment token>\npyglet.resource.reindex()\n<assignment token>\n\n\ndef center_image(image):\n \"\"\"Sets an image's anchor point to its center\"\"\"\n image.anchor_x = image.width / 2\n image.anchor_y = image.height / 2\n\n\ncenter_image(player_image)\ncenter_image(bullet_image)\ncenter_image(asteroid_image)\n<assignment token>\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n\n\ndef center_image(image):\n \"\"\"Sets an image's anchor point to its center\"\"\"\n image.anchor_x = image.width / 2\n image.anchor_y = image.height / 2\n\n\n<code token>\n<assignment token>\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<function token>\n<code token>\n<assignment token>\n"
] | false |
99,069 |
ed6445c0bd134eb7ce0ba48133e279bed13c3fb8
|
import requests
class LoginApi:
def __init__(self):
self.login_url="http://182.92.81.159/api/sys/login"
pass
def login(self,mobile,password):
jsonData={
"mobile":mobile,
"password":password
}
return requests.post(self.login_url,json=jsonData)
|
[
"import requests\nclass LoginApi:\n def __init__(self):\n self.login_url=\"http://182.92.81.159/api/sys/login\"\n pass\n\n def login(self,mobile,password):\n jsonData={\n \"mobile\":mobile,\n \"password\":password\n }\n return requests.post(self.login_url,json=jsonData)",
"import requests\n\n\nclass LoginApi:\n\n def __init__(self):\n self.login_url = 'http://182.92.81.159/api/sys/login'\n pass\n\n def login(self, mobile, password):\n jsonData = {'mobile': mobile, 'password': password}\n return requests.post(self.login_url, json=jsonData)\n",
"<import token>\n\n\nclass LoginApi:\n\n def __init__(self):\n self.login_url = 'http://182.92.81.159/api/sys/login'\n pass\n\n def login(self, mobile, password):\n jsonData = {'mobile': mobile, 'password': password}\n return requests.post(self.login_url, json=jsonData)\n",
"<import token>\n\n\nclass LoginApi:\n\n def __init__(self):\n self.login_url = 'http://182.92.81.159/api/sys/login'\n pass\n <function token>\n",
"<import token>\n\n\nclass LoginApi:\n <function token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,070 |
6fda74df5d4f1c62f10af0c07deee6db2f8afc41
|
import telnetlib
from time import sleep
from requests import Session
from bs4 import BeautifulSoup as bs
import re
import csv
current_id = 0
flag = False
current_user = ''
HOST = '192.168.0.1'
router_username = b'admin'
router_password = b'admin'
with telnetlib.Telnet(HOST) as tn:
tn.read_until(b"username:", 2)
tn.write(router_username + b'\n')
tn.write(router_password + b'\n')
sleep(2)
tn.write(b'wan show connection info\n')
tn.read_until(b'username=')
current_user = tn.read_very_eager().decode('ascii').splitlines()[0]
sleep(2)
tn.write(b'logout\n')
f_idpass = open("idpass.txt", "r")
c_idpass = list(csv.reader(f_idpass))
c = 0
for idpass in c_idpass:
un = idpass[0].strip()
if un == current_user :
current_id = c
break
c += 1
# runs every 5 mins
while True:
f_idpass = open("idpass.txt", "r")
router_username = b'admin'
router_password = b'admin'
HOST = '192.168.0.1'
minute_thresh = 5750
c_usages = []
c_idpass = list(csv.reader(f_idpass))
minute_limit = 0
for idpass in c_idpass:
try:
un = idpass[0].strip()
pw = idpass[1].strip()
# un = l[(2*i)-2]
# ps = l[(2*i)-1]
with Session() as s:
site = s.get("http://10.220.20.12/index.php/home/loginProcess")
bs_content = bs(site.content, "html.parser")
login_data = {"username":un,"password":pw}
s.post("http://10.220.20.12/index.php/home/loginProcess",login_data)
home_page = s.get("http://10.220.20.12/index.php/home/dashboard")
soup = bs(home_page.content, "lxml")
table = soup.table
c = 1
li = []
try:
table_rows = table.find_all('tr')
for tr in table_rows:
td = tr.find_all('td')
row = [i.text for i in td]
if c == 2 or c == 6 or c == 5 :
li.append(row[1])
c += 1
# update list and sort
string1 = li[2]
string2 = li[1]
minute_limit = int(re.search(r'\d+', string2).group()) # limit
minute_used = int(re.search(r'\d+', string1).group()) # used minutes
print(f'{un}\t\t{minute_used}')
minute_thresh = int(.96 * float(minute_limit))
c_usages.append([un, pw, minute_limit - minute_used])
except Exception as e:
print (e)
except:
continue
print (current_id)
if int(minute_limit - c_usages[current_id][2]) > minute_thresh:
current_id = (current_id + 1) % len(c_usages)
flag = True
else :
if flag == True :
username = c_usages[current_id][0]
password = c_usages[current_id][1]
print (username)
with telnetlib.Telnet(HOST) as tn:
tn.read_until(b"username:", 2)
tn.write(router_username + b'\n')
tn.write(router_password + b'\n')
sleep(2)
tn.write(b'wan set service ewan_pppoe --protocol pppoe --username ' + username.encode('ascii') + b' --password ' + password.encode('ascii') + b' --secondConnection sec_conn_dynip\n')
sleep(2)
tn.write(b'logout\n')
flag = False
sleep(300)
#t = input("Press enter to terminate")
|
[
"import telnetlib\nfrom time import sleep\nfrom requests import Session\nfrom bs4 import BeautifulSoup as bs\nimport re\nimport csv\n\ncurrent_id = 0\nflag = False\ncurrent_user = ''\n\nHOST = '192.168.0.1'\nrouter_username = b'admin'\nrouter_password = b'admin'\n\nwith telnetlib.Telnet(HOST) as tn:\n tn.read_until(b\"username:\", 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n\n sleep(2)\n tn.write(b'wan show connection info\\n')\n\n tn.read_until(b'username=')\n \n current_user = tn.read_very_eager().decode('ascii').splitlines()[0]\n \n sleep(2)\n tn.write(b'logout\\n')\n\nf_idpass = open(\"idpass.txt\", \"r\")\nc_idpass = list(csv.reader(f_idpass))\n\nc = 0 \nfor idpass in c_idpass:\n\n un = idpass[0].strip()\n\n if un == current_user :\n current_id = c\n break\n c += 1\n\n# runs every 5 mins\nwhile True:\n f_idpass = open(\"idpass.txt\", \"r\")\n\n router_username = b'admin'\n router_password = b'admin'\n\n HOST = '192.168.0.1'\n\n minute_thresh = 5750\n c_usages = []\n\n c_idpass = list(csv.reader(f_idpass))\n \n minute_limit = 0\n\n for idpass in c_idpass:\n\n try:\n un = idpass[0].strip()\n pw = idpass[1].strip()\n\n # un = l[(2*i)-2]\n # ps = l[(2*i)-1]\n \n with Session() as s:\n site = s.get(\"http://10.220.20.12/index.php/home/loginProcess\")\n bs_content = bs(site.content, \"html.parser\")\n login_data = {\"username\":un,\"password\":pw}\n s.post(\"http://10.220.20.12/index.php/home/loginProcess\",login_data)\n home_page = s.get(\"http://10.220.20.12/index.php/home/dashboard\")\n soup = bs(home_page.content, \"lxml\")\n\n table = soup.table\n\n c = 1\n li = []\n\n try:\n table_rows = table.find_all('tr')\n for tr in table_rows:\n td = tr.find_all('td')\n row = [i.text for i in td]\n if c == 2 or c == 6 or c == 5 :\n li.append(row[1])\n c += 1\n\n # update list and sort\n string1 = li[2]\n string2 = li[1]\n minute_limit = int(re.search(r'\\d+', string2).group()) # limit\n minute_used = int(re.search(r'\\d+', string1).group()) # used minutes\n print(f'{un}\\t\\t{minute_used}')\n\n minute_thresh = int(.96 * float(minute_limit))\n c_usages.append([un, pw, minute_limit - minute_used])\n \n except Exception as e:\n print (e)\n except:\n continue\n \n print (current_id)\n if int(minute_limit - c_usages[current_id][2]) > minute_thresh:\n current_id = (current_id + 1) % len(c_usages)\n flag = True\n \n else :\n if flag == True :\n username = c_usages[current_id][0]\n password = c_usages[current_id][1]\n print (username)\n\n with telnetlib.Telnet(HOST) as tn:\n tn.read_until(b\"username:\", 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n\n sleep(2)\n tn.write(b'wan set service ewan_pppoe --protocol pppoe --username ' + username.encode('ascii') + b' --password ' + password.encode('ascii') + b' --secondConnection sec_conn_dynip\\n')\n \n sleep(2)\n tn.write(b'logout\\n')\n flag = False \n \n sleep(300)\n\n#t = input(\"Press enter to terminate\")\n",
"import telnetlib\nfrom time import sleep\nfrom requests import Session\nfrom bs4 import BeautifulSoup as bs\nimport re\nimport csv\ncurrent_id = 0\nflag = False\ncurrent_user = ''\nHOST = '192.168.0.1'\nrouter_username = b'admin'\nrouter_password = b'admin'\nwith telnetlib.Telnet(HOST) as tn:\n tn.read_until(b'username:', 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n sleep(2)\n tn.write(b'wan show connection info\\n')\n tn.read_until(b'username=')\n current_user = tn.read_very_eager().decode('ascii').splitlines()[0]\n sleep(2)\n tn.write(b'logout\\n')\nf_idpass = open('idpass.txt', 'r')\nc_idpass = list(csv.reader(f_idpass))\nc = 0\nfor idpass in c_idpass:\n un = idpass[0].strip()\n if un == current_user:\n current_id = c\n break\n c += 1\nwhile True:\n f_idpass = open('idpass.txt', 'r')\n router_username = b'admin'\n router_password = b'admin'\n HOST = '192.168.0.1'\n minute_thresh = 5750\n c_usages = []\n c_idpass = list(csv.reader(f_idpass))\n minute_limit = 0\n for idpass in c_idpass:\n try:\n un = idpass[0].strip()\n pw = idpass[1].strip()\n with Session() as s:\n site = s.get('http://10.220.20.12/index.php/home/loginProcess')\n bs_content = bs(site.content, 'html.parser')\n login_data = {'username': un, 'password': pw}\n s.post('http://10.220.20.12/index.php/home/loginProcess',\n login_data)\n home_page = s.get(\n 'http://10.220.20.12/index.php/home/dashboard')\n soup = bs(home_page.content, 'lxml')\n table = soup.table\n c = 1\n li = []\n try:\n table_rows = table.find_all('tr')\n for tr in table_rows:\n td = tr.find_all('td')\n row = [i.text for i in td]\n if c == 2 or c == 6 or c == 5:\n li.append(row[1])\n c += 1\n string1 = li[2]\n string2 = li[1]\n minute_limit = int(re.search('\\\\d+', string2).group())\n minute_used = int(re.search('\\\\d+', string1).group())\n print(f'{un}\\t\\t{minute_used}')\n minute_thresh = int(0.96 * float(minute_limit))\n c_usages.append([un, pw, minute_limit - minute_used])\n except Exception as e:\n print(e)\n except:\n continue\n print(current_id)\n if int(minute_limit - c_usages[current_id][2]) > minute_thresh:\n current_id = (current_id + 1) % len(c_usages)\n flag = True\n elif flag == True:\n username = c_usages[current_id][0]\n password = c_usages[current_id][1]\n print(username)\n with telnetlib.Telnet(HOST) as tn:\n tn.read_until(b'username:', 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n sleep(2)\n tn.write(\n b'wan set service ewan_pppoe --protocol pppoe --username ' +\n username.encode('ascii') + b' --password ' + password.\n encode('ascii') + b' --secondConnection sec_conn_dynip\\n')\n sleep(2)\n tn.write(b'logout\\n')\n flag = False\n sleep(300)\n",
"<import token>\ncurrent_id = 0\nflag = False\ncurrent_user = ''\nHOST = '192.168.0.1'\nrouter_username = b'admin'\nrouter_password = b'admin'\nwith telnetlib.Telnet(HOST) as tn:\n tn.read_until(b'username:', 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n sleep(2)\n tn.write(b'wan show connection info\\n')\n tn.read_until(b'username=')\n current_user = tn.read_very_eager().decode('ascii').splitlines()[0]\n sleep(2)\n tn.write(b'logout\\n')\nf_idpass = open('idpass.txt', 'r')\nc_idpass = list(csv.reader(f_idpass))\nc = 0\nfor idpass in c_idpass:\n un = idpass[0].strip()\n if un == current_user:\n current_id = c\n break\n c += 1\nwhile True:\n f_idpass = open('idpass.txt', 'r')\n router_username = b'admin'\n router_password = b'admin'\n HOST = '192.168.0.1'\n minute_thresh = 5750\n c_usages = []\n c_idpass = list(csv.reader(f_idpass))\n minute_limit = 0\n for idpass in c_idpass:\n try:\n un = idpass[0].strip()\n pw = idpass[1].strip()\n with Session() as s:\n site = s.get('http://10.220.20.12/index.php/home/loginProcess')\n bs_content = bs(site.content, 'html.parser')\n login_data = {'username': un, 'password': pw}\n s.post('http://10.220.20.12/index.php/home/loginProcess',\n login_data)\n home_page = s.get(\n 'http://10.220.20.12/index.php/home/dashboard')\n soup = bs(home_page.content, 'lxml')\n table = soup.table\n c = 1\n li = []\n try:\n table_rows = table.find_all('tr')\n for tr in table_rows:\n td = tr.find_all('td')\n row = [i.text for i in td]\n if c == 2 or c == 6 or c == 5:\n li.append(row[1])\n c += 1\n string1 = li[2]\n string2 = li[1]\n minute_limit = int(re.search('\\\\d+', string2).group())\n minute_used = int(re.search('\\\\d+', string1).group())\n print(f'{un}\\t\\t{minute_used}')\n minute_thresh = int(0.96 * float(minute_limit))\n c_usages.append([un, pw, minute_limit - minute_used])\n except Exception as e:\n print(e)\n except:\n continue\n print(current_id)\n if int(minute_limit - c_usages[current_id][2]) > minute_thresh:\n current_id = (current_id + 1) % len(c_usages)\n flag = True\n elif flag == True:\n username = c_usages[current_id][0]\n password = c_usages[current_id][1]\n print(username)\n with telnetlib.Telnet(HOST) as tn:\n tn.read_until(b'username:', 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n sleep(2)\n tn.write(\n b'wan set service ewan_pppoe --protocol pppoe --username ' +\n username.encode('ascii') + b' --password ' + password.\n encode('ascii') + b' --secondConnection sec_conn_dynip\\n')\n sleep(2)\n tn.write(b'logout\\n')\n flag = False\n sleep(300)\n",
"<import token>\n<assignment token>\nwith telnetlib.Telnet(HOST) as tn:\n tn.read_until(b'username:', 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n sleep(2)\n tn.write(b'wan show connection info\\n')\n tn.read_until(b'username=')\n current_user = tn.read_very_eager().decode('ascii').splitlines()[0]\n sleep(2)\n tn.write(b'logout\\n')\n<assignment token>\nfor idpass in c_idpass:\n un = idpass[0].strip()\n if un == current_user:\n current_id = c\n break\n c += 1\nwhile True:\n f_idpass = open('idpass.txt', 'r')\n router_username = b'admin'\n router_password = b'admin'\n HOST = '192.168.0.1'\n minute_thresh = 5750\n c_usages = []\n c_idpass = list(csv.reader(f_idpass))\n minute_limit = 0\n for idpass in c_idpass:\n try:\n un = idpass[0].strip()\n pw = idpass[1].strip()\n with Session() as s:\n site = s.get('http://10.220.20.12/index.php/home/loginProcess')\n bs_content = bs(site.content, 'html.parser')\n login_data = {'username': un, 'password': pw}\n s.post('http://10.220.20.12/index.php/home/loginProcess',\n login_data)\n home_page = s.get(\n 'http://10.220.20.12/index.php/home/dashboard')\n soup = bs(home_page.content, 'lxml')\n table = soup.table\n c = 1\n li = []\n try:\n table_rows = table.find_all('tr')\n for tr in table_rows:\n td = tr.find_all('td')\n row = [i.text for i in td]\n if c == 2 or c == 6 or c == 5:\n li.append(row[1])\n c += 1\n string1 = li[2]\n string2 = li[1]\n minute_limit = int(re.search('\\\\d+', string2).group())\n minute_used = int(re.search('\\\\d+', string1).group())\n print(f'{un}\\t\\t{minute_used}')\n minute_thresh = int(0.96 * float(minute_limit))\n c_usages.append([un, pw, minute_limit - minute_used])\n except Exception as e:\n print(e)\n except:\n continue\n print(current_id)\n if int(minute_limit - c_usages[current_id][2]) > minute_thresh:\n current_id = (current_id + 1) % len(c_usages)\n flag = True\n elif flag == True:\n username = c_usages[current_id][0]\n password = c_usages[current_id][1]\n print(username)\n with telnetlib.Telnet(HOST) as tn:\n tn.read_until(b'username:', 2)\n tn.write(router_username + b'\\n')\n tn.write(router_password + b'\\n')\n sleep(2)\n tn.write(\n b'wan set service ewan_pppoe --protocol pppoe --username ' +\n username.encode('ascii') + b' --password ' + password.\n encode('ascii') + b' --secondConnection sec_conn_dynip\\n')\n sleep(2)\n tn.write(b'logout\\n')\n flag = False\n sleep(300)\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,071 |
a2fb08013aa2d92c6d06870c17e4c2f1fbde40f3
|
import pandas as pd
import mysql.connector as cn
conn=cn.connect(host='localhost', user='root', passwd='root', database='emp')
qry1='select * from info'
df=pd.read_sql(qry1 , conn)
print(df)
|
[
"import pandas as pd\r\nimport mysql.connector as cn\r\n\r\nconn=cn.connect(host='localhost', user='root', passwd='root', database='emp')\r\nqry1='select * from info'\r\ndf=pd.read_sql(qry1 , conn)\r\nprint(df)\r\n",
"import pandas as pd\nimport mysql.connector as cn\nconn = cn.connect(host='localhost', user='root', passwd='root', database='emp')\nqry1 = 'select * from info'\ndf = pd.read_sql(qry1, conn)\nprint(df)\n",
"<import token>\nconn = cn.connect(host='localhost', user='root', passwd='root', database='emp')\nqry1 = 'select * from info'\ndf = pd.read_sql(qry1, conn)\nprint(df)\n",
"<import token>\n<assignment token>\nprint(df)\n",
"<import token>\n<assignment token>\n<code token>\n"
] | false |
99,072 |
23c10c35854857e49efbe0059daaf0d5ae4c70d4
|
from django.contrib import admin
from django.db import models
from easy_select2.widgets import Select2Multiple
from news.models import Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ('title', 'pub_date', 'author')
readonly_fields = ('slug',)
exclude = ('author',)
formfield_overrides = {
models.ManyToManyField: {'widget': Select2Multiple()}
}
def save_model(self, request, obj, form, change):
if not change:
obj.author = request.user
obj.save()
admin.site.register(Entry, EntryAdmin)
|
[
"from django.contrib import admin\nfrom django.db import models\nfrom easy_select2.widgets import Select2Multiple\nfrom news.models import Entry\n\n\nclass EntryAdmin(admin.ModelAdmin):\n list_display = ('title', 'pub_date', 'author')\n readonly_fields = ('slug',)\n exclude = ('author',)\n\n formfield_overrides = {\n models.ManyToManyField: {'widget': Select2Multiple()}\n }\n\n def save_model(self, request, obj, form, change):\n if not change:\n obj.author = request.user\n obj.save()\n\nadmin.site.register(Entry, EntryAdmin)\n",
"from django.contrib import admin\nfrom django.db import models\nfrom easy_select2.widgets import Select2Multiple\nfrom news.models import Entry\n\n\nclass EntryAdmin(admin.ModelAdmin):\n list_display = 'title', 'pub_date', 'author'\n readonly_fields = 'slug',\n exclude = 'author',\n formfield_overrides = {models.ManyToManyField: {'widget':\n Select2Multiple()}}\n\n def save_model(self, request, obj, form, change):\n if not change:\n obj.author = request.user\n obj.save()\n\n\nadmin.site.register(Entry, EntryAdmin)\n",
"<import token>\n\n\nclass EntryAdmin(admin.ModelAdmin):\n list_display = 'title', 'pub_date', 'author'\n readonly_fields = 'slug',\n exclude = 'author',\n formfield_overrides = {models.ManyToManyField: {'widget':\n Select2Multiple()}}\n\n def save_model(self, request, obj, form, change):\n if not change:\n obj.author = request.user\n obj.save()\n\n\nadmin.site.register(Entry, EntryAdmin)\n",
"<import token>\n\n\nclass EntryAdmin(admin.ModelAdmin):\n list_display = 'title', 'pub_date', 'author'\n readonly_fields = 'slug',\n exclude = 'author',\n formfield_overrides = {models.ManyToManyField: {'widget':\n Select2Multiple()}}\n\n def save_model(self, request, obj, form, change):\n if not change:\n obj.author = request.user\n obj.save()\n\n\n<code token>\n",
"<import token>\n\n\nclass EntryAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def save_model(self, request, obj, form, change):\n if not change:\n obj.author = request.user\n obj.save()\n\n\n<code token>\n",
"<import token>\n\n\nclass EntryAdmin(admin.ModelAdmin):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\n<code token>\n",
"<import token>\n<class token>\n<code token>\n"
] | false |
99,073 |
6305c9b1db91d10aa793c21cfee2dcb89cd528c3
|
class Calc:
def __init__(self, max_value, mod):
"""combination(max_value, all)"""
fact = [-1] * (max_value + 1)
fact[0] = 1
fact[1] = 1
for x in range(2, max_value + 1):
fact[x] = x * fact[x - 1] % mod
invs = [1] * (max_value + 1)
invs[max_value] = pow(fact[max_value], mod - 2, mod)
for x in range(max_value - 1, 0, -1):
invs[x] = invs[x + 1] * (x + 1) % mod
self.fact = fact
self.invs = invs
self.mod = mod
def nCr(self, n, r):
r = min(n - r, r)
if r < 0: return 0
if r == 0: return 1
if r == 1: return n
return self.fact[n] * self.invs[r] * self.invs[n - r] % self.mod
def nHr(self, n, r):
return self.nCr(n - 1 + r, r)
class PowPreCalc:
def __init__(self, *, b, m, mod):
"""(b**m)%mod"""
res = [1]
t = 1
for _ in range(m):
t = t * b % mod
res.append(t)
self._res = res
def get_pow(self, m):
"""(b**m)%mod"""
return self._res[m]
def main():
MOD = 10 ** 9 + 7
K = int(input())
S = input()
N = len(S)
calc = Calc(max_value=N - 1 + K, mod=MOD)
p26 = PowPreCalc(b=26, m=K, mod=MOD)
p25 = PowPreCalc(b=25, m=K, mod=MOD)
ans = 0
for tail_len in range(K + 1):
ans = (ans
+ calc.nCr(N - 1 + K - tail_len, N - 1) * p26.get_pow(tail_len) * p25.get_pow(K - tail_len)
) % MOD
print(ans)
if __name__ == '__main__':
main()
|
[
"class Calc:\n def __init__(self, max_value, mod):\n \"\"\"combination(max_value, all)\"\"\"\n fact = [-1] * (max_value + 1)\n fact[0] = 1\n fact[1] = 1\n for x in range(2, max_value + 1):\n fact[x] = x * fact[x - 1] % mod\n\n invs = [1] * (max_value + 1)\n invs[max_value] = pow(fact[max_value], mod - 2, mod)\n for x in range(max_value - 1, 0, -1):\n invs[x] = invs[x + 1] * (x + 1) % mod\n\n self.fact = fact\n self.invs = invs\n self.mod = mod\n\n def nCr(self, n, r):\n r = min(n - r, r)\n if r < 0: return 0\n if r == 0: return 1\n if r == 1: return n\n return self.fact[n] * self.invs[r] * self.invs[n - r] % self.mod\n\n def nHr(self, n, r):\n return self.nCr(n - 1 + r, r)\n\n\nclass PowPreCalc:\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\ndef main():\n MOD = 10 ** 9 + 7\n\n K = int(input())\n S = input()\n\n N = len(S)\n\n calc = Calc(max_value=N - 1 + K, mod=MOD)\n\n p26 = PowPreCalc(b=26, m=K, mod=MOD)\n p25 = PowPreCalc(b=25, m=K, mod=MOD)\n\n ans = 0\n for tail_len in range(K + 1):\n ans = (ans\n + calc.nCr(N - 1 + K - tail_len, N - 1) * p26.get_pow(tail_len) * p25.get_pow(K - tail_len)\n ) % MOD\n\n print(ans)\n\n\nif __name__ == '__main__':\n main()\n",
"class Calc:\n\n def __init__(self, max_value, mod):\n \"\"\"combination(max_value, all)\"\"\"\n fact = [-1] * (max_value + 1)\n fact[0] = 1\n fact[1] = 1\n for x in range(2, max_value + 1):\n fact[x] = x * fact[x - 1] % mod\n invs = [1] * (max_value + 1)\n invs[max_value] = pow(fact[max_value], mod - 2, mod)\n for x in range(max_value - 1, 0, -1):\n invs[x] = invs[x + 1] * (x + 1) % mod\n self.fact = fact\n self.invs = invs\n self.mod = mod\n\n def nCr(self, n, r):\n r = min(n - r, r)\n if r < 0:\n return 0\n if r == 0:\n return 1\n if r == 1:\n return n\n return self.fact[n] * self.invs[r] * self.invs[n - r] % self.mod\n\n def nHr(self, n, r):\n return self.nCr(n - 1 + r, r)\n\n\nclass PowPreCalc:\n\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\ndef main():\n MOD = 10 ** 9 + 7\n K = int(input())\n S = input()\n N = len(S)\n calc = Calc(max_value=N - 1 + K, mod=MOD)\n p26 = PowPreCalc(b=26, m=K, mod=MOD)\n p25 = PowPreCalc(b=25, m=K, mod=MOD)\n ans = 0\n for tail_len in range(K + 1):\n ans = (ans + calc.nCr(N - 1 + K - tail_len, N - 1) * p26.get_pow(\n tail_len) * p25.get_pow(K - tail_len)) % MOD\n print(ans)\n\n\nif __name__ == '__main__':\n main()\n",
"class Calc:\n\n def __init__(self, max_value, mod):\n \"\"\"combination(max_value, all)\"\"\"\n fact = [-1] * (max_value + 1)\n fact[0] = 1\n fact[1] = 1\n for x in range(2, max_value + 1):\n fact[x] = x * fact[x - 1] % mod\n invs = [1] * (max_value + 1)\n invs[max_value] = pow(fact[max_value], mod - 2, mod)\n for x in range(max_value - 1, 0, -1):\n invs[x] = invs[x + 1] * (x + 1) % mod\n self.fact = fact\n self.invs = invs\n self.mod = mod\n\n def nCr(self, n, r):\n r = min(n - r, r)\n if r < 0:\n return 0\n if r == 0:\n return 1\n if r == 1:\n return n\n return self.fact[n] * self.invs[r] * self.invs[n - r] % self.mod\n\n def nHr(self, n, r):\n return self.nCr(n - 1 + r, r)\n\n\nclass PowPreCalc:\n\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\ndef main():\n MOD = 10 ** 9 + 7\n K = int(input())\n S = input()\n N = len(S)\n calc = Calc(max_value=N - 1 + K, mod=MOD)\n p26 = PowPreCalc(b=26, m=K, mod=MOD)\n p25 = PowPreCalc(b=25, m=K, mod=MOD)\n ans = 0\n for tail_len in range(K + 1):\n ans = (ans + calc.nCr(N - 1 + K - tail_len, N - 1) * p26.get_pow(\n tail_len) * p25.get_pow(K - tail_len)) % MOD\n print(ans)\n\n\n<code token>\n",
"class Calc:\n\n def __init__(self, max_value, mod):\n \"\"\"combination(max_value, all)\"\"\"\n fact = [-1] * (max_value + 1)\n fact[0] = 1\n fact[1] = 1\n for x in range(2, max_value + 1):\n fact[x] = x * fact[x - 1] % mod\n invs = [1] * (max_value + 1)\n invs[max_value] = pow(fact[max_value], mod - 2, mod)\n for x in range(max_value - 1, 0, -1):\n invs[x] = invs[x + 1] * (x + 1) % mod\n self.fact = fact\n self.invs = invs\n self.mod = mod\n\n def nCr(self, n, r):\n r = min(n - r, r)\n if r < 0:\n return 0\n if r == 0:\n return 1\n if r == 1:\n return n\n return self.fact[n] * self.invs[r] * self.invs[n - r] % self.mod\n\n def nHr(self, n, r):\n return self.nCr(n - 1 + r, r)\n\n\nclass PowPreCalc:\n\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\n<function token>\n<code token>\n",
"class Calc:\n\n def __init__(self, max_value, mod):\n \"\"\"combination(max_value, all)\"\"\"\n fact = [-1] * (max_value + 1)\n fact[0] = 1\n fact[1] = 1\n for x in range(2, max_value + 1):\n fact[x] = x * fact[x - 1] % mod\n invs = [1] * (max_value + 1)\n invs[max_value] = pow(fact[max_value], mod - 2, mod)\n for x in range(max_value - 1, 0, -1):\n invs[x] = invs[x + 1] * (x + 1) % mod\n self.fact = fact\n self.invs = invs\n self.mod = mod\n\n def nCr(self, n, r):\n r = min(n - r, r)\n if r < 0:\n return 0\n if r == 0:\n return 1\n if r == 1:\n return n\n return self.fact[n] * self.invs[r] * self.invs[n - r] % self.mod\n <function token>\n\n\nclass PowPreCalc:\n\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\n<function token>\n<code token>\n",
"class Calc:\n <function token>\n\n def nCr(self, n, r):\n r = min(n - r, r)\n if r < 0:\n return 0\n if r == 0:\n return 1\n if r == 1:\n return n\n return self.fact[n] * self.invs[r] * self.invs[n - r] % self.mod\n <function token>\n\n\nclass PowPreCalc:\n\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\n<function token>\n<code token>\n",
"class Calc:\n <function token>\n <function token>\n <function token>\n\n\nclass PowPreCalc:\n\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\n<function token>\n<code token>\n",
"<class token>\n\n\nclass PowPreCalc:\n\n def __init__(self, *, b, m, mod):\n \"\"\"(b**m)%mod\"\"\"\n res = [1]\n t = 1\n for _ in range(m):\n t = t * b % mod\n res.append(t)\n self._res = res\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\n<function token>\n<code token>\n",
"<class token>\n\n\nclass PowPreCalc:\n <function token>\n\n def get_pow(self, m):\n \"\"\"(b**m)%mod\"\"\"\n return self._res[m]\n\n\n<function token>\n<code token>\n",
"<class token>\n\n\nclass PowPreCalc:\n <function token>\n <function token>\n\n\n<function token>\n<code token>\n",
"<class token>\n<class token>\n<function token>\n<code token>\n"
] | false |
99,074 |
d407888fdfb3e6d3fecf3a6940636d6ec1aea5e5
|
n = int(input())
num = list(map(int, input().split()))
res = []
for i in range(1, n+1):
cnt = 0
x = num.index(i)
tmp = num[0:x]
for j in tmp:
if j > i:
cnt += 1
res.append(cnt)
for i in res:
print(i, end=" ")
|
[
"n = int(input())\nnum = list(map(int, input().split()))\n\nres = []\n\nfor i in range(1, n+1):\n cnt = 0\n x = num.index(i)\n tmp = num[0:x]\n for j in tmp:\n if j > i:\n cnt += 1\n res.append(cnt)\n\nfor i in res:\n print(i, end=\" \")\n ",
"n = int(input())\nnum = list(map(int, input().split()))\nres = []\nfor i in range(1, n + 1):\n cnt = 0\n x = num.index(i)\n tmp = num[0:x]\n for j in tmp:\n if j > i:\n cnt += 1\n res.append(cnt)\nfor i in res:\n print(i, end=' ')\n",
"<assignment token>\nfor i in range(1, n + 1):\n cnt = 0\n x = num.index(i)\n tmp = num[0:x]\n for j in tmp:\n if j > i:\n cnt += 1\n res.append(cnt)\nfor i in res:\n print(i, end=' ')\n",
"<assignment token>\n<code token>\n"
] | false |
99,075 |
bd6d7699e8f4e0d82fefa51a0c26762e33acd4d0
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--file',
dest='file',
default='/tmp/backup.gz',
help='gz file to restore from'),
make_option('--analyze_only',
dest='analyze_only',
action='store_true',
default=False,
help='donot read file, just analyze')
)
help = 'Restore reports from file'
def handle(self, *args, **options):
import reports.logic
import analysis.logic
self.stdout.write('analyze_only = %s' % (options['analyze_only']))
self.stdout.write('file = %s' % (options['file']))
if not options['analyze_only']:
reports.logic.restore_reports(options['file'])
analysis.logic.analyze_raw_reports()
|
[
"from django.core.management.base import BaseCommand, CommandError\nfrom optparse import make_option\n\nclass Command(BaseCommand):\n option_list = BaseCommand.option_list + ( \n make_option('--file',\n dest='file',\n default='/tmp/backup.gz',\n help='gz file to restore from'),\n make_option('--analyze_only',\n dest='analyze_only',\n action='store_true',\n default=False,\n help='donot read file, just analyze')\n )\n \n help = 'Restore reports from file'\n def handle(self, *args, **options):\n import reports.logic\n import analysis.logic\n self.stdout.write('analyze_only = %s' % (options['analyze_only']))\n self.stdout.write('file = %s' % (options['file']))\n if not options['analyze_only']:\n reports.logic.restore_reports(options['file'])\n analysis.logic.analyze_raw_reports()\n\n \n \n",
"from django.core.management.base import BaseCommand, CommandError\nfrom optparse import make_option\n\n\nclass Command(BaseCommand):\n option_list = BaseCommand.option_list + (make_option('--file', dest=\n 'file', default='/tmp/backup.gz', help='gz file to restore from'),\n make_option('--analyze_only', dest='analyze_only', action=\n 'store_true', default=False, help='donot read file, just analyze'))\n help = 'Restore reports from file'\n\n def handle(self, *args, **options):\n import reports.logic\n import analysis.logic\n self.stdout.write('analyze_only = %s' % options['analyze_only'])\n self.stdout.write('file = %s' % options['file'])\n if not options['analyze_only']:\n reports.logic.restore_reports(options['file'])\n analysis.logic.analyze_raw_reports()\n",
"<import token>\n\n\nclass Command(BaseCommand):\n option_list = BaseCommand.option_list + (make_option('--file', dest=\n 'file', default='/tmp/backup.gz', help='gz file to restore from'),\n make_option('--analyze_only', dest='analyze_only', action=\n 'store_true', default=False, help='donot read file, just analyze'))\n help = 'Restore reports from file'\n\n def handle(self, *args, **options):\n import reports.logic\n import analysis.logic\n self.stdout.write('analyze_only = %s' % options['analyze_only'])\n self.stdout.write('file = %s' % options['file'])\n if not options['analyze_only']:\n reports.logic.restore_reports(options['file'])\n analysis.logic.analyze_raw_reports()\n",
"<import token>\n\n\nclass Command(BaseCommand):\n <assignment token>\n <assignment token>\n\n def handle(self, *args, **options):\n import reports.logic\n import analysis.logic\n self.stdout.write('analyze_only = %s' % options['analyze_only'])\n self.stdout.write('file = %s' % options['file'])\n if not options['analyze_only']:\n reports.logic.restore_reports(options['file'])\n analysis.logic.analyze_raw_reports()\n",
"<import token>\n\n\nclass Command(BaseCommand):\n <assignment token>\n <assignment token>\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,076 |
49ba2676f44055fbb3eeb75b2a8f929ea3227dc1
|
def possible(stones, k, mid):
count = 0
for stone in stones:
# 음수인 경우는 건너지 못한 경우이므로 이 구간마다 count += 1 해주기
if stone - mid < 0:
count += 1
# 연속된 음수가 k개인 경우 바로 False를 리턴한다.
if count >= k:
return False
# 연속되지 않은 경우이므로 count = 0으로 초기화
else:
count = 0
return True
def solution(stones, k):
answer = 0
# 초기값 설정
start, end = 1, max(stones)
while start <= end:
mid = (start + end) // 2
# 해당 인원이 징검다리를 건널 수 있는지 체크
if possible(stones, k, mid):
answer = max(answer, mid)
start = mid + 1
else:
end = mid - 1
return answer
|
[
"def possible(stones, k, mid):\n count = 0\n for stone in stones:\n # 음수인 경우는 건너지 못한 경우이므로 이 구간마다 count += 1 해주기\n if stone - mid < 0:\n count += 1\n # 연속된 음수가 k개인 경우 바로 False를 리턴한다.\n if count >= k:\n return False\n # 연속되지 않은 경우이므로 count = 0으로 초기화\n else:\n count = 0\n return True\n\n\ndef solution(stones, k):\n answer = 0\n # 초기값 설정\n start, end = 1, max(stones)\n while start <= end:\n mid = (start + end) // 2\n\n # 해당 인원이 징검다리를 건널 수 있는지 체크\n if possible(stones, k, mid):\n answer = max(answer, mid)\n start = mid + 1\n else:\n end = mid - 1\n return answer",
"def possible(stones, k, mid):\n count = 0\n for stone in stones:\n if stone - mid < 0:\n count += 1\n if count >= k:\n return False\n else:\n count = 0\n return True\n\n\ndef solution(stones, k):\n answer = 0\n start, end = 1, max(stones)\n while start <= end:\n mid = (start + end) // 2\n if possible(stones, k, mid):\n answer = max(answer, mid)\n start = mid + 1\n else:\n end = mid - 1\n return answer\n",
"<function token>\n\n\ndef solution(stones, k):\n answer = 0\n start, end = 1, max(stones)\n while start <= end:\n mid = (start + end) // 2\n if possible(stones, k, mid):\n answer = max(answer, mid)\n start = mid + 1\n else:\n end = mid - 1\n return answer\n",
"<function token>\n<function token>\n"
] | false |
99,077 |
cc10cb7b6ed1938641812f8df01351ef38e92f9d
|
"""
Function: Construct a gene object with its location
Created: 2013-07-31
Author: Chelsea Ju
"""
class Gene:
def __init__(self, name, chr, strand, start, end):
self.name = name
self.chr = chr
self.strand = strand
self.start = int(start)
self.end = int(end)
self.primary = 0
self.secondary = 0
self.size = self.end - self.start + 1
def __repr__(self):
return repr((self.name, self.chr, self.strand, self.start, self.end, self.size))
def set_name(self, name):
self.name = name
def set_start(self, position):
self.start = position
def set_end(self, position):
self.end = position
def set_size(self, length):
exon_length = length.split(",")
self.size = sum(int(x) for x in exon_length)
def add_primary_fragment(self, count):
self.primary += count
def add_secondary_fragment(self, count):
self.secondary += count
def set_primary_fragment(self, count):
self.primary = count
def set_secondary_fragment(self, count):
self.secondary = count
def get_fragment(self, type = 0):
# type = 0 : combine primary and secondary
# type = 1 : primary fragments
# type = 2 : secondary fragments
if(type == 0):
return float(self.primary) + float(self.secondary)
elif(type == 1):
return float(self.primary)
elif(type == 2):
return float(self.secondary)
else:
print ("Invalid Fragments Type: %d" %(type))
sys.exit(2)
def get_fpkm(self,total_fragment, type = 0):
# type = 0 : combine primary and secondary
# type = 1 : primary fragments
# type = 2 : secondary fragments
try:
self.size != 0
except:
print("Gene size can not be zero")
sys.exit(2)
try:
total_fragment < 1
except:
print ("Invalid total fragment count")
sys.exit(2)
if(type == 0):
return (float(self.primary) + float(self.secondary))* float(10**9) / (float(self.size) * float(total_fragment))
elif(type == 1):
return (float(self.primary) * float(10**9)) / (float(self.size) * float(total_fragment))
elif(type == 2):
return (float(self.secondary) * float(10**9))/ (float(self.size) * float(total_fragment))
else:
print ("Invalid fragments type: %d" %(type))
sys.exit(2)
|
[
"\"\"\"\nFunction: Construct a gene object with its location\nCreated: 2013-07-31\nAuthor: Chelsea Ju\n\"\"\"\nclass Gene:\n \n def __init__(self, name, chr, strand, start, end):\n self.name = name\n self.chr = chr\n self.strand = strand\n self.start = int(start)\n self.end = int(end)\n self.primary = 0\n self.secondary = 0\n self.size = self.end - self.start + 1\n \n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end, self.size))\n\n def set_name(self, name):\n self.name = name\n \n def set_start(self, position):\n self.start = position\n \n def set_end(self, position):\n self.end = position\n\n def set_size(self, length):\n exon_length = length.split(\",\")\n self.size = sum(int(x) for x in exon_length) \n\n def add_primary_fragment(self, count):\n self.primary += count\n\n def add_secondary_fragment(self, count):\n self.secondary += count\n\n def set_primary_fragment(self, count):\n self.primary = count\n\n def set_secondary_fragment(self, count):\n self.secondary = count\n\n \n def get_fragment(self, type = 0):\n # type = 0 : combine primary and secondary\n # type = 1 : primary fragments\n # type = 2 : secondary fragments\n\n if(type == 0):\n return float(self.primary) + float(self.secondary)\n elif(type == 1):\n return float(self.primary)\n elif(type == 2):\n return float(self.secondary)\n else:\n print (\"Invalid Fragments Type: %d\" %(type))\n sys.exit(2)\n\n \n def get_fpkm(self,total_fragment, type = 0):\n # type = 0 : combine primary and secondary\n # type = 1 : primary fragments\n # type = 2 : secondary fragments\n try:\n self.size != 0\n except:\n print(\"Gene size can not be zero\")\n sys.exit(2)\n \n try:\n total_fragment < 1\n except:\n print (\"Invalid total fragment count\")\n sys.exit(2)\n\n if(type == 0):\n return (float(self.primary) + float(self.secondary))* float(10**9) / (float(self.size) * float(total_fragment))\n elif(type == 1):\n return (float(self.primary) * float(10**9)) / (float(self.size) * float(total_fragment))\n elif(type == 2):\n return (float(self.secondary) * float(10**9))/ (float(self.size) * float(total_fragment))\n else:\n print (\"Invalid fragments type: %d\" %(type))\n sys.exit(2)",
"<docstring token>\n\n\nclass Gene:\n\n def __init__(self, name, chr, strand, start, end):\n self.name = name\n self.chr = chr\n self.strand = strand\n self.start = int(start)\n self.end = int(end)\n self.primary = 0\n self.secondary = 0\n self.size = self.end - self.start + 1\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n\n def set_start(self, position):\n self.start = position\n\n def set_end(self, position):\n self.end = position\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n\n def add_secondary_fragment(self, count):\n self.secondary += count\n\n def set_primary_fragment(self, count):\n self.primary = count\n\n def set_secondary_fragment(self, count):\n self.secondary = count\n\n def get_fragment(self, type=0):\n if type == 0:\n return float(self.primary) + float(self.secondary)\n elif type == 1:\n return float(self.primary)\n elif type == 2:\n return float(self.secondary)\n else:\n print('Invalid Fragments Type: %d' % type)\n sys.exit(2)\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n\n def __init__(self, name, chr, strand, start, end):\n self.name = name\n self.chr = chr\n self.strand = strand\n self.start = int(start)\n self.end = int(end)\n self.primary = 0\n self.secondary = 0\n self.size = self.end - self.start + 1\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n <function token>\n\n def set_end(self, position):\n self.end = position\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n\n def add_secondary_fragment(self, count):\n self.secondary += count\n\n def set_primary_fragment(self, count):\n self.primary = count\n\n def set_secondary_fragment(self, count):\n self.secondary = count\n\n def get_fragment(self, type=0):\n if type == 0:\n return float(self.primary) + float(self.secondary)\n elif type == 1:\n return float(self.primary)\n elif type == 2:\n return float(self.secondary)\n else:\n print('Invalid Fragments Type: %d' % type)\n sys.exit(2)\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n\n def __init__(self, name, chr, strand, start, end):\n self.name = name\n self.chr = chr\n self.strand = strand\n self.start = int(start)\n self.end = int(end)\n self.primary = 0\n self.secondary = 0\n self.size = self.end - self.start + 1\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n <function token>\n\n def set_end(self, position):\n self.end = position\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n\n def add_secondary_fragment(self, count):\n self.secondary += count\n\n def set_primary_fragment(self, count):\n self.primary = count\n <function token>\n\n def get_fragment(self, type=0):\n if type == 0:\n return float(self.primary) + float(self.secondary)\n elif type == 1:\n return float(self.primary)\n elif type == 2:\n return float(self.secondary)\n else:\n print('Invalid Fragments Type: %d' % type)\n sys.exit(2)\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n\n def __init__(self, name, chr, strand, start, end):\n self.name = name\n self.chr = chr\n self.strand = strand\n self.start = int(start)\n self.end = int(end)\n self.primary = 0\n self.secondary = 0\n self.size = self.end - self.start + 1\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n <function token>\n\n def set_end(self, position):\n self.end = position\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n <function token>\n\n def set_primary_fragment(self, count):\n self.primary = count\n <function token>\n\n def get_fragment(self, type=0):\n if type == 0:\n return float(self.primary) + float(self.secondary)\n elif type == 1:\n return float(self.primary)\n elif type == 2:\n return float(self.secondary)\n else:\n print('Invalid Fragments Type: %d' % type)\n sys.exit(2)\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n\n def __init__(self, name, chr, strand, start, end):\n self.name = name\n self.chr = chr\n self.strand = strand\n self.start = int(start)\n self.end = int(end)\n self.primary = 0\n self.secondary = 0\n self.size = self.end - self.start + 1\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n <function token>\n\n def set_end(self, position):\n self.end = position\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n <function token>\n\n def set_primary_fragment(self, count):\n self.primary = count\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n <function token>\n\n def set_end(self, position):\n self.end = position\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n <function token>\n\n def set_primary_fragment(self, count):\n self.primary = count\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n <function token>\n <function token>\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n <function token>\n\n def set_primary_fragment(self, count):\n self.primary = count\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n\n def set_name(self, name):\n self.name = name\n <function token>\n <function token>\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n\n def __repr__(self):\n return repr((self.name, self.chr, self.strand, self.start, self.end,\n self.size))\n <function token>\n <function token>\n <function token>\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n\n def add_primary_fragment(self, count):\n self.primary += count\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_size(self, length):\n exon_length = length.split(',')\n self.size = sum(int(x) for x in exon_length)\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def get_fpkm(self, total_fragment, type=0):\n try:\n self.size != 0\n except:\n print('Gene size can not be zero')\n sys.exit(2)\n try:\n total_fragment < 1\n except:\n print('Invalid total fragment count')\n sys.exit(2)\n if type == 0:\n return (float(self.primary) + float(self.secondary)) * float(10 **\n 9) / (float(self.size) * float(total_fragment))\n elif type == 1:\n return float(self.primary) * float(10 ** 9) / (float(self.size) *\n float(total_fragment))\n elif type == 2:\n return float(self.secondary) * float(10 ** 9) / (float(self.\n size) * float(total_fragment))\n else:\n print('Invalid fragments type: %d' % type)\n sys.exit(2)\n",
"<docstring token>\n\n\nclass Gene:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<class token>\n"
] | false |
99,078 |
00596ecd9bfe00284a92f18b2fde686e37d40b56
|
"""
4880.토너먼트 카드게임
"""
def cardGames(s,l) :
if s == l :
return personCards[s-1]
else :
p1 = cardGames(s, (s+l)//2)
p2 = cardGames((s+l)//2+1,l)
if abs(p1[1]-p2[1]) == 1:
if p1[1] > p2[1] :
return p1
return p2
elif abs(p1[1]-p2[1]) == 2:
if p1[1] > p2[1] :
return p2
return p1
else :
# 비기는 경우
return p1
T = int(input())
for test_case in range(1,T+1) :
n = int(input())
cards = list(map(int,input().split()))
personCards = [[i+1,cards[i]] for i in range(len(cards))]
winner = cardGames(1, n)
print(f'#{test_case} {winner[0]}')
|
[
"\"\"\"\n4880.토너먼트 카드게임\n\"\"\"\ndef cardGames(s,l) :\n if s == l :\n return personCards[s-1]\n else :\n p1 = cardGames(s, (s+l)//2)\n p2 = cardGames((s+l)//2+1,l)\n if abs(p1[1]-p2[1]) == 1:\n if p1[1] > p2[1] :\n return p1\n return p2\n elif abs(p1[1]-p2[1]) == 2:\n if p1[1] > p2[1] :\n return p2\n return p1\n else :\n # 비기는 경우\n return p1\n\n\nT = int(input())\nfor test_case in range(1,T+1) :\n n = int(input())\n cards = list(map(int,input().split()))\n\n personCards = [[i+1,cards[i]] for i in range(len(cards))]\n\n winner = cardGames(1, n)\n print(f'#{test_case} {winner[0]}')",
"<docstring token>\n\n\ndef cardGames(s, l):\n if s == l:\n return personCards[s - 1]\n else:\n p1 = cardGames(s, (s + l) // 2)\n p2 = cardGames((s + l) // 2 + 1, l)\n if abs(p1[1] - p2[1]) == 1:\n if p1[1] > p2[1]:\n return p1\n return p2\n elif abs(p1[1] - p2[1]) == 2:\n if p1[1] > p2[1]:\n return p2\n return p1\n else:\n return p1\n\n\nT = int(input())\nfor test_case in range(1, T + 1):\n n = int(input())\n cards = list(map(int, input().split()))\n personCards = [[i + 1, cards[i]] for i in range(len(cards))]\n winner = cardGames(1, n)\n print(f'#{test_case} {winner[0]}')\n",
"<docstring token>\n\n\ndef cardGames(s, l):\n if s == l:\n return personCards[s - 1]\n else:\n p1 = cardGames(s, (s + l) // 2)\n p2 = cardGames((s + l) // 2 + 1, l)\n if abs(p1[1] - p2[1]) == 1:\n if p1[1] > p2[1]:\n return p1\n return p2\n elif abs(p1[1] - p2[1]) == 2:\n if p1[1] > p2[1]:\n return p2\n return p1\n else:\n return p1\n\n\n<assignment token>\nfor test_case in range(1, T + 1):\n n = int(input())\n cards = list(map(int, input().split()))\n personCards = [[i + 1, cards[i]] for i in range(len(cards))]\n winner = cardGames(1, n)\n print(f'#{test_case} {winner[0]}')\n",
"<docstring token>\n\n\ndef cardGames(s, l):\n if s == l:\n return personCards[s - 1]\n else:\n p1 = cardGames(s, (s + l) // 2)\n p2 = cardGames((s + l) // 2 + 1, l)\n if abs(p1[1] - p2[1]) == 1:\n if p1[1] > p2[1]:\n return p1\n return p2\n elif abs(p1[1] - p2[1]) == 2:\n if p1[1] > p2[1]:\n return p2\n return p1\n else:\n return p1\n\n\n<assignment token>\n<code token>\n",
"<docstring token>\n<function token>\n<assignment token>\n<code token>\n"
] | false |
99,079 |
7a452758bf63eee91237e5ea16067298a1fc2bfc
|
#!/usr/bin/env python3
#
# Write a program that outputs the string representation of numbers from 1 to n.
# But for multiples of three it should output “Fizz” instead of the number and
# for the multiples of five output “Buzz”. For numbers which are multiples of
# both three and five output “FizzBuzz”.
def fizzbuzz(n):
''' return a list of strings '''
return ['Fizz' * (not i % 3) + 'Buzz' * (not i % 5) or str(i)
for i in range(1, n + 1 )]
def fizzbuzz_firstattempt(n):
''' return a list of strings '''
str_list = []
for idx in range(1, n + 1):
num_str = ''
if idx % 3 == 0:
num_str += 'Fizz'
if idx % 5 == 0:
num_str += 'Buzz'
if idx % 3 != 0 and idx % 5 != 0:
num_str += str(idx)
str_list.append(num_str)
return str_list
# Tests
for string in fizzbuzz(15):
print(string)
print()
print(fizzbuzz(0))
|
[
"#!/usr/bin/env python3\n#\n# Write a program that outputs the string representation of numbers from 1 to n.\n# But for multiples of three it should output “Fizz” instead of the number and \n# for the multiples of five output “Buzz”. For numbers which are multiples of \n# both three and five output “FizzBuzz”.\n\n\ndef fizzbuzz(n):\n ''' return a list of strings '''\n return ['Fizz' * (not i % 3) + 'Buzz' * (not i % 5) or str(i)\n for i in range(1, n + 1 )]\n\n\ndef fizzbuzz_firstattempt(n):\n ''' return a list of strings '''\n str_list = []\n\n for idx in range(1, n + 1):\n num_str = ''\n if idx % 3 == 0:\n num_str += 'Fizz'\n if idx % 5 == 0:\n num_str += 'Buzz'\n if idx % 3 != 0 and idx % 5 != 0:\n num_str += str(idx)\n str_list.append(num_str)\n\n return str_list\n\n\n# Tests\nfor string in fizzbuzz(15):\n print(string)\nprint()\nprint(fizzbuzz(0))\n",
"def fizzbuzz(n):\n \"\"\" return a list of strings \"\"\"\n return [('Fizz' * (not i % 3) + 'Buzz' * (not i % 5) or str(i)) for i in\n range(1, n + 1)]\n\n\ndef fizzbuzz_firstattempt(n):\n \"\"\" return a list of strings \"\"\"\n str_list = []\n for idx in range(1, n + 1):\n num_str = ''\n if idx % 3 == 0:\n num_str += 'Fizz'\n if idx % 5 == 0:\n num_str += 'Buzz'\n if idx % 3 != 0 and idx % 5 != 0:\n num_str += str(idx)\n str_list.append(num_str)\n return str_list\n\n\nfor string in fizzbuzz(15):\n print(string)\nprint()\nprint(fizzbuzz(0))\n",
"def fizzbuzz(n):\n \"\"\" return a list of strings \"\"\"\n return [('Fizz' * (not i % 3) + 'Buzz' * (not i % 5) or str(i)) for i in\n range(1, n + 1)]\n\n\ndef fizzbuzz_firstattempt(n):\n \"\"\" return a list of strings \"\"\"\n str_list = []\n for idx in range(1, n + 1):\n num_str = ''\n if idx % 3 == 0:\n num_str += 'Fizz'\n if idx % 5 == 0:\n num_str += 'Buzz'\n if idx % 3 != 0 and idx % 5 != 0:\n num_str += str(idx)\n str_list.append(num_str)\n return str_list\n\n\n<code token>\n",
"<function token>\n\n\ndef fizzbuzz_firstattempt(n):\n \"\"\" return a list of strings \"\"\"\n str_list = []\n for idx in range(1, n + 1):\n num_str = ''\n if idx % 3 == 0:\n num_str += 'Fizz'\n if idx % 5 == 0:\n num_str += 'Buzz'\n if idx % 3 != 0 and idx % 5 != 0:\n num_str += str(idx)\n str_list.append(num_str)\n return str_list\n\n\n<code token>\n",
"<function token>\n<function token>\n<code token>\n"
] | false |
99,080 |
ff1f2c86d3dbbd04131bf4751b32b18e8ea476bd
|
# coding=utf-8
__author__ = 'kk'
__cookies_file__ = './cookies.dat'
import scrapy
from scrapy.selector import HtmlXPathSelector
import copy
import logging
import urllib2
import urllib
import cookielib
import zlib
class TestItem(scrapy.Item):
id = scrapy.Field()
name = scrapy.Field()
img = scrapy.Field()
description = scrapy.Field()
class MySpider(scrapy.Spider):
name = 'myspider'
allowed_domains = ['saraba1st.com'] #可选。包含了spider允许爬取的域名列表
# rules #CrawlSpider
# link_extractor 是一个 Link Extractor 对象。 其定义了如何从爬取到的页面提取链接。
# start_urls = [
# 'http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1'
# ]
base_url = 'http://bbs.saraba1st.com/2b/'
headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Encoding": "gzip,deflate,sdch",
"Accept-Language": "zh-CN,zh;q=0.8",
"Connection": "keep-alive",
"Content-Type":" application/x-www-form-urlencoded; charset=UTF-8",
"User-Agent": "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_2 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8H7 Safari/6533.18.5",
}
def parse(self, response):
self.log('开始解析:%s' % response.url)
print response.meta
sel = scrapy.selector.Selector(response)
div_class = sel.xpath('//div[@class="bm_c"]')
print "xxxxxxxxx", div_class.extract()
# sel = scrapy.Selector(response)
# hxs = HtmlXPathSelector(response)
#
# # for h3 in response.xpath('//h3').extract():
# # yield TestItem(title=h3)
#
#
# # path():返回selectors列表, 每一个select表示一个xpath参数表达式选择的节点.
# # extract():返回一个unicode字符串,该字符串为XPath选择器返回的数据
# # re(): 返回unicode字符串列表,字符串作为参数由正则表达式提取出来
# items = []
# imgs = response.xpath('//div[@id="xcnr_zx"]//img')
# # for img in div.xpath('//img/@title').extract():
# # yield TestItem(img=img)
# for img in imgs:
# item = TestItem()
# item['id'] = img.xpath('@alt').extract()
# item['img'] = img.xpath('@title').extract()
# item['name'] = img.xpath('@class').extract()
# items.append(item)
# print items
# return items
# items.extend([self.make_requests_from_url(url).replace(callback=self.parse_post)
# for url in posts])
# for url in response.xpath('//a/@href').extract():
# yield scrapy.Request(url, callback=self.parse)
# def __init__(self, category=None, *args, **kwargs):
# super(MySpider, self).__init__(*args, **kwargs)
# self.start_urls = ['http://www.geimian.com/%s' % category]
def start_requests(self):
self.log('start before,but not run start_urls')
cJar = cookielib.LWPCookieJar()
file_object = False
try:
file_object = open(__cookies_file__)
cJar._really_load(file_object, file_object, False, False)
except :
print 'wenjian bucunzai '
cookiess = dict()
for item in cJar:
cookiess[item.name] = item.value
print cookiess
if file_object:
file_object.close()
return [scrapy.Request('http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1', meta={'cookiejar': 1}, cookies=cookiess, headers=self.headers, callback=self.check_login)]
def check_login(self, response):
sel = scrapy.selector.Selector(response)
div_class = sel.xpath('//div[@class="pd2"]')
# print "xxxxxxxxx", div_class.xpath('//a[text()="%s"]/@href' % u'登录').extract()[0]
if div_class.xpath('//a/text()').extract()[0] == u'登录':
print '未登录'
# l6751902
login_url = self.base_url + div_class.xpath('//a[text()="%s"]/@href' % u'登录').extract()[0].encode('utf-8')
print 'login_url', login_url
return [scrapy.Request(login_url, meta = {'cookiejar' : response.meta['cookiejar']}, headers = self.headers, callback = self.logged_in)]
else:
print '登录成功 cookie'
self.parse(response)
# return [scrapy.Request('http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1', meta = {'cookiejar' : 1}, headers = self.headers,dont_filter = True)]
def logged_in(self, response):
sel = scrapy.selector.Selector(response)
print "xxxxxxxxx", sel.xpath('//input[@name="formhash"]').extract()
formhash = sel.xpath('//input[@name="formhash"]/@value').extract()[0].encode('utf-8')
self.log("formhash:%s" % formhash, logging.INFO)
login_head = copy.deepcopy(self.headers)
login_head['Origin'] = 'http://bbs.saraba1st.com'
login_head['Referer'] = 'http://bbs.saraba1st.com/2b/member.php?mod=logging&action=login&mobile=1'
# http://bbs.saraba1st.com/2b/member.php?mod=logging&action=login&loginsubmit=yes&loginhash=LnDKp&mobile=yes
if self.getCookies(formhash, login_head):
self.log("重新cookies成功!!~~", logging.INFO)
self.start_requests()
else:
self.log("重新获取cookies失败!!~~", logging.ERROR)
# return [scrapy.FormRequest.from_response(response,
# meta = {'cookiejar' : response.meta['cookiejar']},
# headers = login_head,
# formdata = {
# 'formhash': formhash,
# 'referer':'http://bbs.saraba1st.com/2b/member.php?mod=clearcookies&formhash=%s&mobile=1' % formhash,
# 'fastloginfield':'username',
# 'username': 'l6751902',
# 'password': 'l35331963',
# 'submit':'登录',
# 'questionid':'0',
# 'answer':'',
# 'cookietime':'2592000'
# },
# callback = self.after_login,
# dont_filter = True
# )]
def after_login(self, response):
sel = scrapy.selector.Selector(response)
div_class = sel.xpath('//div[@class="pd2"]')
if div_class.xpath('//a/text()').extract()[0] == u'登录':
self.log('登录失败', logging.WARNING)
else:
self.log('登录成功', logging.INFO)
return [scrapy.Request('http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1', meta = {'cookiejar' : response.meta['cookiejar']}, headers = self.headers,dont_filter = True)]
# def parse_start_url(self, response): #CrawlSpider
# # 当start_url的请求返回时,该方法被调用。 该方法分析最初的返回值并必须返回一个 Item对象或者 一个 Request 对象或者 一个可迭代的包含二者对象。
# pass
# def make_requests_from_url(self, url):
# # 该方法接受一个URL并返回用于爬取的 Request 对象。 该方法在初始化request时被start_requests() 调用,也被用于转化url为request。
# # 默认未被复写(overridden)的情况下,该方法返回的Request对象中, parse() 作为回调函数,dont_filter参数也被设置为开启。
# pass
def getCookies(self, formhash, login_head):
data = {'formhash': formhash,
'referer': 'http://bbs.saraba1st.com/2b/member.php?mod=clearcookies&formhash=%s&mobile=1' % formhash,
'fastloginfield': 'username',
'username': 'l6751902',
'password': 'l35331963',
'submit': '登录',
'questionid': '0',
'answer': '',
'cookietime': '2592000'}
post_data = urllib.urlencode(data) #将post消息化成可以让服务器编码的方式
cJar = cookielib.LWPCookieJar() #获取cookiejar实例
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cJar))
website = 'http://bbs.saraba1st.com/2b/member.php?mod=logging&action=login&loginsubmit=yes&loginhash=LnDKp&mobile=yes'
req = urllib2.Request(website, post_data, login_head)
response = opener.open(req)
cJar.save(__cookies_file__)
content = response.read()
gzipped = response.headers.get('Content-Encoding')
if gzipped:
html = zlib.decompress(content, 16+zlib.MAX_WBITS)
else:
html = content
# print html
if 'l6751902' in html:
return True
return False
if __name__ == "__main__":
pass
|
[
"# coding=utf-8\n__author__ = 'kk'\n\n__cookies_file__ = './cookies.dat'\n\nimport scrapy\nfrom scrapy.selector import HtmlXPathSelector\nimport copy\nimport logging\n\nimport urllib2\nimport urllib\nimport cookielib\n\nimport zlib\n\n\nclass TestItem(scrapy.Item):\n id = scrapy.Field()\n name = scrapy.Field()\n img = scrapy.Field()\n description = scrapy.Field()\n\n\nclass MySpider(scrapy.Spider):\n name = 'myspider'\n allowed_domains = ['saraba1st.com'] #可选。包含了spider允许爬取的域名列表\n # rules #CrawlSpider\n # link_extractor 是一个 Link Extractor 对象。 其定义了如何从爬取到的页面提取链接。\n # start_urls = [\n # 'http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1'\n # ]\n base_url = 'http://bbs.saraba1st.com/2b/'\n\n headers = {\n \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8\",\n \"Accept-Encoding\": \"gzip,deflate,sdch\",\n \"Accept-Language\": \"zh-CN,zh;q=0.8\",\n \"Connection\": \"keep-alive\",\n \"Content-Type\":\" application/x-www-form-urlencoded; charset=UTF-8\",\n \"User-Agent\": \"Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_2 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8H7 Safari/6533.18.5\",\n }\n\n def parse(self, response):\n self.log('开始解析:%s' % response.url)\n print response.meta\n\n sel = scrapy.selector.Selector(response)\n div_class = sel.xpath('//div[@class=\"bm_c\"]')\n print \"xxxxxxxxx\", div_class.extract()\n\n # sel = scrapy.Selector(response)\n # hxs = HtmlXPathSelector(response)\n #\n # # for h3 in response.xpath('//h3').extract():\n # # yield TestItem(title=h3)\n #\n #\n # # path():返回selectors列表, 每一个select表示一个xpath参数表达式选择的节点.\n # # extract():返回一个unicode字符串,该字符串为XPath选择器返回的数据\n # # re(): 返回unicode字符串列表,字符串作为参数由正则表达式提取出来\n # items = []\n # imgs = response.xpath('//div[@id=\"xcnr_zx\"]//img')\n # # for img in div.xpath('//img/@title').extract():\n # # yield TestItem(img=img)\n # for img in imgs:\n # item = TestItem()\n # item['id'] = img.xpath('@alt').extract()\n # item['img'] = img.xpath('@title').extract()\n # item['name'] = img.xpath('@class').extract()\n # items.append(item)\n # print items\n # return items\n\n # items.extend([self.make_requests_from_url(url).replace(callback=self.parse_post)\n # for url in posts])\n\n\n\n # for url in response.xpath('//a/@href').extract():\n # yield scrapy.Request(url, callback=self.parse)\n\n # def __init__(self, category=None, *args, **kwargs):\n # super(MySpider, self).__init__(*args, **kwargs)\n # self.start_urls = ['http://www.geimian.com/%s' % category]\n\n def start_requests(self):\n self.log('start before,but not run start_urls')\n\n cJar = cookielib.LWPCookieJar()\n file_object = False\n try:\n file_object = open(__cookies_file__)\n cJar._really_load(file_object, file_object, False, False)\n except :\n print 'wenjian bucunzai '\n\n cookiess = dict()\n for item in cJar:\n cookiess[item.name] = item.value\n print cookiess\n\n if file_object:\n file_object.close()\n return [scrapy.Request('http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1', meta={'cookiejar': 1}, cookies=cookiess, headers=self.headers, callback=self.check_login)]\n\n def check_login(self, response):\n sel = scrapy.selector.Selector(response)\n div_class = sel.xpath('//div[@class=\"pd2\"]')\n\n # print \"xxxxxxxxx\", div_class.xpath('//a[text()=\"%s\"]/@href' % u'登录').extract()[0]\n if div_class.xpath('//a/text()').extract()[0] == u'登录':\n print '未登录'\n # l6751902\n login_url = self.base_url + div_class.xpath('//a[text()=\"%s\"]/@href' % u'登录').extract()[0].encode('utf-8')\n print 'login_url', login_url\n return [scrapy.Request(login_url, meta = {'cookiejar' : response.meta['cookiejar']}, headers = self.headers, callback = self.logged_in)]\n else:\n print '登录成功 cookie'\n self.parse(response)\n # return [scrapy.Request('http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1', meta = {'cookiejar' : 1}, headers = self.headers,dont_filter = True)]\n\n\n def logged_in(self, response):\n sel = scrapy.selector.Selector(response)\n print \"xxxxxxxxx\", sel.xpath('//input[@name=\"formhash\"]').extract()\n formhash = sel.xpath('//input[@name=\"formhash\"]/@value').extract()[0].encode('utf-8')\n self.log(\"formhash:%s\" % formhash, logging.INFO)\n\n login_head = copy.deepcopy(self.headers)\n login_head['Origin'] = 'http://bbs.saraba1st.com'\n login_head['Referer'] = 'http://bbs.saraba1st.com/2b/member.php?mod=logging&action=login&mobile=1'\n # http://bbs.saraba1st.com/2b/member.php?mod=logging&action=login&loginsubmit=yes&loginhash=LnDKp&mobile=yes\n if self.getCookies(formhash, login_head):\n self.log(\"重新cookies成功!!~~\", logging.INFO)\n self.start_requests()\n else:\n self.log(\"重新获取cookies失败!!~~\", logging.ERROR)\n # return [scrapy.FormRequest.from_response(response,\n # meta = {'cookiejar' : response.meta['cookiejar']},\n # headers = login_head,\n # formdata = {\n # 'formhash': formhash,\n # 'referer':'http://bbs.saraba1st.com/2b/member.php?mod=clearcookies&formhash=%s&mobile=1' % formhash,\n # 'fastloginfield':'username',\n # 'username': 'l6751902',\n # 'password': 'l35331963',\n # 'submit':'登录',\n # 'questionid':'0',\n # 'answer':'',\n # 'cookietime':'2592000'\n # },\n # callback = self.after_login,\n # dont_filter = True\n # )]\n\n def after_login(self, response):\n sel = scrapy.selector.Selector(response)\n div_class = sel.xpath('//div[@class=\"pd2\"]')\n if div_class.xpath('//a/text()').extract()[0] == u'登录':\n self.log('登录失败', logging.WARNING)\n else:\n self.log('登录成功', logging.INFO)\n return [scrapy.Request('http://bbs.saraba1st.com/2b/forum-75-1.html?mobile=1', meta = {'cookiejar' : response.meta['cookiejar']}, headers = self.headers,dont_filter = True)]\n\n\n # def parse_start_url(self, response): #CrawlSpider\n # # 当start_url的请求返回时,该方法被调用。 该方法分析最初的返回值并必须返回一个 Item对象或者 一个 Request 对象或者 一个可迭代的包含二者对象。\n # pass\n\n # def make_requests_from_url(self, url):\n # # 该方法接受一个URL并返回用于爬取的 Request 对象。 该方法在初始化request时被start_requests() 调用,也被用于转化url为request。\n # # 默认未被复写(overridden)的情况下,该方法返回的Request对象中, parse() 作为回调函数,dont_filter参数也被设置为开启。\n # pass\n\n def getCookies(self, formhash, login_head):\n data = {'formhash': formhash,\n 'referer': 'http://bbs.saraba1st.com/2b/member.php?mod=clearcookies&formhash=%s&mobile=1' % formhash,\n 'fastloginfield': 'username',\n 'username': 'l6751902',\n 'password': 'l35331963',\n 'submit': '登录',\n 'questionid': '0',\n 'answer': '',\n 'cookietime': '2592000'}\n post_data = urllib.urlencode(data) #将post消息化成可以让服务器编码的方式\n cJar = cookielib.LWPCookieJar() #获取cookiejar实例\n opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cJar))\n website = 'http://bbs.saraba1st.com/2b/member.php?mod=logging&action=login&loginsubmit=yes&loginhash=LnDKp&mobile=yes'\n req = urllib2.Request(website, post_data, login_head)\n response = opener.open(req)\n cJar.save(__cookies_file__)\n\n content = response.read()\n gzipped = response.headers.get('Content-Encoding')\n if gzipped:\n html = zlib.decompress(content, 16+zlib.MAX_WBITS)\n else:\n html = content\n\n # print html\n if 'l6751902' in html:\n return True\n return False\n\n\n\nif __name__ == \"__main__\":\n pass\n"
] | true |
99,081 |
cfea97561fdbcf6c108e2f3beb4919b9225e6192
|
import numpy as np
import torch
import torchvision.transforms as transform
from PIL import Image
from torchvision import models
from torchsummary import summary
from torchvision.utils import make_grid
import matplotlib.pyplot as plt
import torch.nn as nn
class Transformer(nn.Module):
def __init__(self, input_size, output_size):
super().__init__()
|
[
"import numpy as np\nimport torch\nimport torchvision.transforms as transform\nfrom PIL import Image\nfrom torchvision import models\nfrom torchsummary import summary\nfrom torchvision.utils import make_grid\nimport matplotlib.pyplot as plt\nimport torch.nn as nn\n\nclass Transformer(nn.Module):\n def __init__(self, input_size, output_size):\n super().__init__()",
"import numpy as np\nimport torch\nimport torchvision.transforms as transform\nfrom PIL import Image\nfrom torchvision import models\nfrom torchsummary import summary\nfrom torchvision.utils import make_grid\nimport matplotlib.pyplot as plt\nimport torch.nn as nn\n\n\nclass Transformer(nn.Module):\n\n def __init__(self, input_size, output_size):\n super().__init__()\n",
"<import token>\n\n\nclass Transformer(nn.Module):\n\n def __init__(self, input_size, output_size):\n super().__init__()\n",
"<import token>\n\n\nclass Transformer(nn.Module):\n <function token>\n",
"<import token>\n<class token>\n"
] | false |
99,082 |
77ef6f54da1da89336ea4c430cd1e83e0696ec29
|
from django.db import models
from django.utils import timezone
# Create your models here.
class User(models.Model):
name = models.CharField(max_length=75)
contact_no = models.CharField(max_length=14)
email = models.EmailField()
active_items = models.ForeignKey('Item')
active_bids = models.ForeignKey('Bid')
def __str__(self):
return self.name
class Item(models.Model):
item_name = models.CharField(max_length=75)
item_desc = models.TextField(max_length=1000)
sale_price = models.DecimalField(max_digits=8, decimal_places=2)
post_date = models.DateTimeField('Date Posted ', auto_now_add=True, default=timezone.now())
for_sale = models.BooleanField(default=False)
def __str__(self):
return self.item_name
class Bid(models.Model):
bid_no = models.AutoField(primary_key=True,default=-1)
bid_amt = models.DecimalField(max_digits=8, decimal_places=2)
post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())
accepted = models.BooleanField(default=False)
item_to_sell = models.ForeignKey('Item',default=0)
def __str__(self):
return self.bid_no
|
[
"from django.db import models\nfrom django.utils import timezone\n\n# Create your models here.\nclass User(models.Model):\n\tname = models.CharField(max_length=75)\n\tcontact_no = models.CharField(max_length=14)\n\temail = models.EmailField()\n\tactive_items = models.ForeignKey('Item')\n\tactive_bids = models.ForeignKey('Bid')\n\n\tdef __str__(self):\n\t\treturn self.name\n\nclass Item(models.Model):\n\titem_name = models.CharField(max_length=75)\n\titem_desc = models.TextField(max_length=1000)\n\tsale_price = models.DecimalField(max_digits=8, decimal_places=2)\n\tpost_date = models.DateTimeField('Date Posted ', auto_now_add=True, default=timezone.now())\n\tfor_sale = models.BooleanField(default=False)\n\ndef __str__(self):\n\t\treturn self.item_name\n\nclass Bid(models.Model):\n\tbid_no = models.AutoField(primary_key=True,default=-1)\n\tbid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n\tpost_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n\taccepted = models.BooleanField(default=False)\n\titem_to_sell = models.ForeignKey('Item',default=0)\n\ndef __str__(self):\n\treturn self.bid_no",
"from django.db import models\nfrom django.utils import timezone\n\n\nclass User(models.Model):\n name = models.CharField(max_length=75)\n contact_no = models.CharField(max_length=14)\n email = models.EmailField()\n active_items = models.ForeignKey('Item')\n active_bids = models.ForeignKey('Bid')\n\n def __str__(self):\n return self.name\n\n\nclass Item(models.Model):\n item_name = models.CharField(max_length=75)\n item_desc = models.TextField(max_length=1000)\n sale_price = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField('Date Posted ', auto_now_add=True,\n default=timezone.now())\n for_sale = models.BooleanField(default=False)\n\n\ndef __str__(self):\n return self.item_name\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\ndef __str__(self):\n return self.bid_no\n",
"<import token>\n\n\nclass User(models.Model):\n name = models.CharField(max_length=75)\n contact_no = models.CharField(max_length=14)\n email = models.EmailField()\n active_items = models.ForeignKey('Item')\n active_bids = models.ForeignKey('Bid')\n\n def __str__(self):\n return self.name\n\n\nclass Item(models.Model):\n item_name = models.CharField(max_length=75)\n item_desc = models.TextField(max_length=1000)\n sale_price = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField('Date Posted ', auto_now_add=True,\n default=timezone.now())\n for_sale = models.BooleanField(default=False)\n\n\ndef __str__(self):\n return self.item_name\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\ndef __str__(self):\n return self.bid_no\n",
"<import token>\n\n\nclass User(models.Model):\n name = models.CharField(max_length=75)\n contact_no = models.CharField(max_length=14)\n email = models.EmailField()\n active_items = models.ForeignKey('Item')\n active_bids = models.ForeignKey('Bid')\n\n def __str__(self):\n return self.name\n\n\nclass Item(models.Model):\n item_name = models.CharField(max_length=75)\n item_desc = models.TextField(max_length=1000)\n sale_price = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField('Date Posted ', auto_now_add=True,\n default=timezone.now())\n for_sale = models.BooleanField(default=False)\n\n\n<function token>\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\ndef __str__(self):\n return self.bid_no\n",
"<import token>\n\n\nclass User(models.Model):\n name = models.CharField(max_length=75)\n contact_no = models.CharField(max_length=14)\n email = models.EmailField()\n active_items = models.ForeignKey('Item')\n active_bids = models.ForeignKey('Bid')\n\n def __str__(self):\n return self.name\n\n\nclass Item(models.Model):\n item_name = models.CharField(max_length=75)\n item_desc = models.TextField(max_length=1000)\n sale_price = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField('Date Posted ', auto_now_add=True,\n default=timezone.now())\n for_sale = models.BooleanField(default=False)\n\n\n<function token>\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\n<function token>\n",
"<import token>\n\n\nclass User(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n def __str__(self):\n return self.name\n\n\nclass Item(models.Model):\n item_name = models.CharField(max_length=75)\n item_desc = models.TextField(max_length=1000)\n sale_price = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField('Date Posted ', auto_now_add=True,\n default=timezone.now())\n for_sale = models.BooleanField(default=False)\n\n\n<function token>\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\n<function token>\n",
"<import token>\n\n\nclass User(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <function token>\n\n\nclass Item(models.Model):\n item_name = models.CharField(max_length=75)\n item_desc = models.TextField(max_length=1000)\n sale_price = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField('Date Posted ', auto_now_add=True,\n default=timezone.now())\n for_sale = models.BooleanField(default=False)\n\n\n<function token>\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\n<function token>\n",
"<import token>\n<class token>\n\n\nclass Item(models.Model):\n item_name = models.CharField(max_length=75)\n item_desc = models.TextField(max_length=1000)\n sale_price = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField('Date Posted ', auto_now_add=True,\n default=timezone.now())\n for_sale = models.BooleanField(default=False)\n\n\n<function token>\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\n<function token>\n",
"<import token>\n<class token>\n\n\nclass Item(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<function token>\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\n<function token>\n",
"<import token>\n<class token>\n<class token>\n<function token>\n\n\nclass Bid(models.Model):\n bid_no = models.AutoField(primary_key=True, default=-1)\n bid_amt = models.DecimalField(max_digits=8, decimal_places=2)\n post_date = models.DateTimeField(auto_now_add=True, default=timezone.now())\n accepted = models.BooleanField(default=False)\n item_to_sell = models.ForeignKey('Item', default=0)\n\n\n<function token>\n",
"<import token>\n<class token>\n<class token>\n<function token>\n\n\nclass Bid(models.Model):\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n <assignment token>\n\n\n<function token>\n",
"<import token>\n<class token>\n<class token>\n<function token>\n<class token>\n<function token>\n"
] | false |
99,083 |
b8de141da71d65665961fdb9044f7e7068156805
|
from .calc_euc import (
get_euc_masks,calc_euc)
from .calc_rho_stf import (
calc_rho_moc, meridional_trsp_at_rho,
calc_rho_section_stf, section_trsp_at_rho,
get_rho_bins)
from .llcmap import atlantic_map
from .read_mds import read_mds, read_single_mds
from .plot_2d import (
global_and_stereo_map, plot_depth_slice)
__all__ = [
'llcmap',
'calc_euc',
'calc_rho_moc',
'meridional_trsp_at_rho',
'calc_rho_section_stf',
'section_trsp_at_rho',
'get_rho_bins',
'read_mds',
'read_single_mds',
'global_and_stereo_map',
'plot_depth_slice',
]
|
[
"from .calc_euc import (\n get_euc_masks,calc_euc)\nfrom .calc_rho_stf import (\n calc_rho_moc, meridional_trsp_at_rho,\n calc_rho_section_stf, section_trsp_at_rho,\n get_rho_bins)\n\nfrom .llcmap import atlantic_map\n\nfrom .read_mds import read_mds, read_single_mds\n\nfrom .plot_2d import (\n global_and_stereo_map, plot_depth_slice)\n\n__all__ = [\n 'llcmap',\n 'calc_euc',\n 'calc_rho_moc',\n 'meridional_trsp_at_rho',\n 'calc_rho_section_stf',\n 'section_trsp_at_rho',\n 'get_rho_bins',\n 'read_mds',\n 'read_single_mds',\n 'global_and_stereo_map',\n 'plot_depth_slice',\n]\n",
"from .calc_euc import get_euc_masks, calc_euc\nfrom .calc_rho_stf import calc_rho_moc, meridional_trsp_at_rho, calc_rho_section_stf, section_trsp_at_rho, get_rho_bins\nfrom .llcmap import atlantic_map\nfrom .read_mds import read_mds, read_single_mds\nfrom .plot_2d import global_and_stereo_map, plot_depth_slice\n__all__ = ['llcmap', 'calc_euc', 'calc_rho_moc', 'meridional_trsp_at_rho',\n 'calc_rho_section_stf', 'section_trsp_at_rho', 'get_rho_bins',\n 'read_mds', 'read_single_mds', 'global_and_stereo_map', 'plot_depth_slice']\n",
"<import token>\n__all__ = ['llcmap', 'calc_euc', 'calc_rho_moc', 'meridional_trsp_at_rho',\n 'calc_rho_section_stf', 'section_trsp_at_rho', 'get_rho_bins',\n 'read_mds', 'read_single_mds', 'global_and_stereo_map', 'plot_depth_slice']\n",
"<import token>\n<assignment token>\n"
] | false |
99,084 |
86ff629384eaa3021c714d622be5484a72efadd3
|
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots['test_create_mutation 1'] = {
'data': {
'createProduct': {
'product': {
'id': '1',
'images': [
{
'photo': '/media/test/photo_1.jpg',
'photoThumbnail': '/media/test/CACHE/images/photo_1/1c92775827def316cb8d32c40de1bb70.jpg'
},
{
'photo': '/media/test/photo_2.jpg',
'photoThumbnail': '/media/test/CACHE/images/photo_2/2929c69a42230a9d8aee6e969c0e7669.jpg'
}
],
'name': 'product'
}
}
}
}
|
[
"# -*- coding: utf-8 -*-\n# snapshottest: v1 - https://goo.gl/zC4yUc\nfrom __future__ import unicode_literals\n\nfrom snapshottest import Snapshot\n\n\nsnapshots = Snapshot()\n\nsnapshots['test_create_mutation 1'] = {\n 'data': {\n 'createProduct': {\n 'product': {\n 'id': '1',\n 'images': [\n {\n 'photo': '/media/test/photo_1.jpg',\n 'photoThumbnail': '/media/test/CACHE/images/photo_1/1c92775827def316cb8d32c40de1bb70.jpg'\n },\n {\n 'photo': '/media/test/photo_2.jpg',\n 'photoThumbnail': '/media/test/CACHE/images/photo_2/2929c69a42230a9d8aee6e969c0e7669.jpg'\n }\n ],\n 'name': 'product'\n }\n }\n }\n}\n",
"from __future__ import unicode_literals\nfrom snapshottest import Snapshot\nsnapshots = Snapshot()\nsnapshots['test_create_mutation 1'] = {'data': {'createProduct': {'product':\n {'id': '1', 'images': [{'photo': '/media/test/photo_1.jpg',\n 'photoThumbnail':\n '/media/test/CACHE/images/photo_1/1c92775827def316cb8d32c40de1bb70.jpg'\n }, {'photo': '/media/test/photo_2.jpg', 'photoThumbnail':\n '/media/test/CACHE/images/photo_2/2929c69a42230a9d8aee6e969c0e7669.jpg'\n }], 'name': 'product'}}}}\n",
"<import token>\nsnapshots = Snapshot()\nsnapshots['test_create_mutation 1'] = {'data': {'createProduct': {'product':\n {'id': '1', 'images': [{'photo': '/media/test/photo_1.jpg',\n 'photoThumbnail':\n '/media/test/CACHE/images/photo_1/1c92775827def316cb8d32c40de1bb70.jpg'\n }, {'photo': '/media/test/photo_2.jpg', 'photoThumbnail':\n '/media/test/CACHE/images/photo_2/2929c69a42230a9d8aee6e969c0e7669.jpg'\n }], 'name': 'product'}}}}\n",
"<import token>\n<assignment token>\n"
] | false |
99,085 |
c80fafa9482d4eecf46b74e1bea3b7e07cb185b6
|
# Generated by Django 3.1.1 on 2020-10-15 15:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('employees', '0002_employee_picture'),
]
operations = [
migrations.AddField(
model_name='employee',
name='dateFired',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='employee',
name='dateHired',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='employee',
name='notes',
field=models.TextField(blank=True),
),
]
|
[
"# Generated by Django 3.1.1 on 2020-10-15 15:44\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('employees', '0002_employee_picture'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='employee',\n name='dateFired',\n field=models.DateTimeField(blank=True, null=True),\n ),\n migrations.AddField(\n model_name='employee',\n name='dateHired',\n field=models.DateTimeField(blank=True, null=True),\n ),\n migrations.AddField(\n model_name='employee',\n name='notes',\n field=models.TextField(blank=True),\n ),\n ]\n",
"from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('employees', '0002_employee_picture')]\n operations = [migrations.AddField(model_name='employee', name=\n 'dateFired', field=models.DateTimeField(blank=True, null=True)),\n migrations.AddField(model_name='employee', name='dateHired', field=\n models.DateTimeField(blank=True, null=True)), migrations.AddField(\n model_name='employee', name='notes', field=models.TextField(blank=\n True))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('employees', '0002_employee_picture')]\n operations = [migrations.AddField(model_name='employee', name=\n 'dateFired', field=models.DateTimeField(blank=True, null=True)),\n migrations.AddField(model_name='employee', name='dateHired', field=\n models.DateTimeField(blank=True, null=True)), migrations.AddField(\n model_name='employee', name='notes', field=models.TextField(blank=\n True))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
99,086 |
9b583054ec5a0afe1b4ee4ce5c05eda450037fb4
|
import pandas as pdb
import numpy as np
import scipy.io as spio
import matplotlib.pyplot as plt
from matplotlib import cm
import sys
import mysql.connector
fig = plt.figure()
ax1 = fig.add_subplot(221)
ax1.set_axis_bgcolor('grey')
ax1.axis("Off")
ax2 = fig.add_subplot(222)
ax3 = fig.add_subplot(223)
ax4 = fig.add_subplot(224)
ax1.title.set_text('First Plot')
ax2.title.set_text('Second Plot')
ax3.title.set_text('Third Plot')
ax4.title.set_text('Fourth Plot')
plt.show()
x = np.linspace(0, 2 * np.pi, 400)
y = np.sin(x ** 2)
# Four axes, returned as a 2-d array
f, axarr = plt.subplots(2, 2)
axarr[0, 0].plot(x, y)
axarr[0, 0].set_title('Axis [0,0]')
axarr[0, 0].set_yticklabels([])
axarr[0, 0].set_xticklabels([])
axarr[0, 0].set_axis_bgcolor('white')
axarr[0, 1].scatter(x, y)
axarr[0, 1].set_title('Axis [0,1]')
axarr[0, 0].set_axis_bgcolor('grey')
#axarr[0, 1].axis("Off")
axarr[1, 0].plot(x, y ** 2)
axarr[1, 0].set_title('Axis [1,0]')
#axarr[1, 0].axis("Off")
axarr[1, 1].scatter(x, y ** 2)
axarr[1, 1].set_title('Axis [1,1]')
#axarr[1, 1].axis("Off")
# Fine-tune figure; hide x ticks for top plots and y ticks for right plots
#plt.setp([a.get_xticklabels() for a in axarr[0, :]], visible=False)
#plt.setp([a.get_yticklabels() for a in axarr[:, 1]], visible=False)
plt.show()
|
[
"import pandas as pdb\nimport numpy as np\nimport scipy.io as spio\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\nimport sys\nimport mysql.connector\n\nfig = plt.figure()\nax1 = fig.add_subplot(221)\nax1.set_axis_bgcolor('grey')\nax1.axis(\"Off\")\nax2 = fig.add_subplot(222)\nax3 = fig.add_subplot(223)\nax4 = fig.add_subplot(224)\nax1.title.set_text('First Plot')\nax2.title.set_text('Second Plot')\nax3.title.set_text('Third Plot')\nax4.title.set_text('Fourth Plot')\nplt.show()\n\n\n\nx = np.linspace(0, 2 * np.pi, 400)\ny = np.sin(x ** 2)\n\n# Four axes, returned as a 2-d array\nf, axarr = plt.subplots(2, 2)\naxarr[0, 0].plot(x, y)\naxarr[0, 0].set_title('Axis [0,0]')\naxarr[0, 0].set_yticklabels([])\naxarr[0, 0].set_xticklabels([])\n\naxarr[0, 0].set_axis_bgcolor('white')\n\naxarr[0, 1].scatter(x, y)\naxarr[0, 1].set_title('Axis [0,1]')\naxarr[0, 0].set_axis_bgcolor('grey')\n#axarr[0, 1].axis(\"Off\")\naxarr[1, 0].plot(x, y ** 2)\naxarr[1, 0].set_title('Axis [1,0]')\n#axarr[1, 0].axis(\"Off\")\naxarr[1, 1].scatter(x, y ** 2)\naxarr[1, 1].set_title('Axis [1,1]')\n#axarr[1, 1].axis(\"Off\")\n# Fine-tune figure; hide x ticks for top plots and y ticks for right plots\n#plt.setp([a.get_xticklabels() for a in axarr[0, :]], visible=False)\n#plt.setp([a.get_yticklabels() for a in axarr[:, 1]], visible=False)\n\nplt.show()",
"import pandas as pdb\nimport numpy as np\nimport scipy.io as spio\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\nimport sys\nimport mysql.connector\nfig = plt.figure()\nax1 = fig.add_subplot(221)\nax1.set_axis_bgcolor('grey')\nax1.axis('Off')\nax2 = fig.add_subplot(222)\nax3 = fig.add_subplot(223)\nax4 = fig.add_subplot(224)\nax1.title.set_text('First Plot')\nax2.title.set_text('Second Plot')\nax3.title.set_text('Third Plot')\nax4.title.set_text('Fourth Plot')\nplt.show()\nx = np.linspace(0, 2 * np.pi, 400)\ny = np.sin(x ** 2)\nf, axarr = plt.subplots(2, 2)\naxarr[0, 0].plot(x, y)\naxarr[0, 0].set_title('Axis [0,0]')\naxarr[0, 0].set_yticklabels([])\naxarr[0, 0].set_xticklabels([])\naxarr[0, 0].set_axis_bgcolor('white')\naxarr[0, 1].scatter(x, y)\naxarr[0, 1].set_title('Axis [0,1]')\naxarr[0, 0].set_axis_bgcolor('grey')\naxarr[1, 0].plot(x, y ** 2)\naxarr[1, 0].set_title('Axis [1,0]')\naxarr[1, 1].scatter(x, y ** 2)\naxarr[1, 1].set_title('Axis [1,1]')\nplt.show()\n",
"<import token>\nfig = plt.figure()\nax1 = fig.add_subplot(221)\nax1.set_axis_bgcolor('grey')\nax1.axis('Off')\nax2 = fig.add_subplot(222)\nax3 = fig.add_subplot(223)\nax4 = fig.add_subplot(224)\nax1.title.set_text('First Plot')\nax2.title.set_text('Second Plot')\nax3.title.set_text('Third Plot')\nax4.title.set_text('Fourth Plot')\nplt.show()\nx = np.linspace(0, 2 * np.pi, 400)\ny = np.sin(x ** 2)\nf, axarr = plt.subplots(2, 2)\naxarr[0, 0].plot(x, y)\naxarr[0, 0].set_title('Axis [0,0]')\naxarr[0, 0].set_yticklabels([])\naxarr[0, 0].set_xticklabels([])\naxarr[0, 0].set_axis_bgcolor('white')\naxarr[0, 1].scatter(x, y)\naxarr[0, 1].set_title('Axis [0,1]')\naxarr[0, 0].set_axis_bgcolor('grey')\naxarr[1, 0].plot(x, y ** 2)\naxarr[1, 0].set_title('Axis [1,0]')\naxarr[1, 1].scatter(x, y ** 2)\naxarr[1, 1].set_title('Axis [1,1]')\nplt.show()\n",
"<import token>\n<assignment token>\nax1.set_axis_bgcolor('grey')\nax1.axis('Off')\n<assignment token>\nax1.title.set_text('First Plot')\nax2.title.set_text('Second Plot')\nax3.title.set_text('Third Plot')\nax4.title.set_text('Fourth Plot')\nplt.show()\n<assignment token>\naxarr[0, 0].plot(x, y)\naxarr[0, 0].set_title('Axis [0,0]')\naxarr[0, 0].set_yticklabels([])\naxarr[0, 0].set_xticklabels([])\naxarr[0, 0].set_axis_bgcolor('white')\naxarr[0, 1].scatter(x, y)\naxarr[0, 1].set_title('Axis [0,1]')\naxarr[0, 0].set_axis_bgcolor('grey')\naxarr[1, 0].plot(x, y ** 2)\naxarr[1, 0].set_title('Axis [1,0]')\naxarr[1, 1].scatter(x, y ** 2)\naxarr[1, 1].set_title('Axis [1,1]')\nplt.show()\n",
"<import token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,087 |
c6d648818992030617324df52815216fbd1dc64d
|
# coding=utf-8
# 型号:mpu6050
import smbus
class GyroscopeDriver:
# 全局变量
GRAVITIY_MS2 = 9.80665
address = None
bus = None
# 缩放修饰符
# 加速度计灵敏度
ACCEL_SCALE_MODIFIER_2G = 16384.0
ACCEL_SCALE_MODIFIER_4G = 8192.0
ACCEL_SCALE_MODIFIER_8G = 4096.0
ACCEL_SCALE_MODIFIER_16G = 2048.0
# 陀螺仪灵敏度
GYRO_SCALE_MODIFIER_250DEG = 131.0
GYRO_SCALE_MODIFIER_500DEG = 65.5
GYRO_SCALE_MODIFIER_1000DEG = 32.8
GYRO_SCALE_MODIFIER_2000DEG = 16.4
# 预定义范围
ACCEL_RANGE_2G = 0x00
ACCEL_RANGE_4G = 0x08
ACCEL_RANGE_8G = 0x10
ACCEL_RANGE_16G = 0x18
GYRO_RANGE_250DEG = 0x00
GYRO_RANGE_500DEG = 0x08
GYRO_RANGE_1000DEG = 0x10
GYRO_RANGE_2000DEG = 0x18
# MPU-6050 寄存器
PWR_MGMT_1 = 0x6B
PWR_MGMT_2 = 0x6C
ACCEL_XOUT0 = 0x3B
ACCEL_YOUT0 = 0x3D
ACCEL_ZOUT0 = 0x3F
TEMP_OUT0 = 0x41
GYRO_XOUT0 = 0x43
GYRO_YOUT0 = 0x45
GYRO_ZOUT0 = 0x47
ACCEL_CONFIG = 0x1C
GYRO_CONFIG = 0x1B
def __init__(self, address=0x68, bus=1):
self.address = address
self.bus = smbus.SMBus(bus)
# 唤醒 MPU-6050
self.bus.write_byte_data(self.address, self.PWR_MGMT_1, 0x00)
# I2C 通信方法
def read_i2c_word(self, register):
# 读取两个寄存器中的值并合并
high = self.bus.read_byte_data(self.address, register)
low = self.bus.read_byte_data(self.address, register + 1)
value = (high << 8) + low
if value >= 0x8000:
return -((65535 - value) + 1)
else:
return value
# MPU-6050 Methods
def get_temp(self):
# 读取温度计的值,并以摄氏度返回
raw_temp = self.read_i2c_word(self.TEMP_OUT0)
# 得到实际温度
actual_temp = (raw_temp / 340.0) + 36.53
return actual_temp
def set_accel_range(self, accel_range):
# 设置加速度计的范围
# 先清空
self.bus.write_byte_data(self.address, self.ACCEL_CONFIG, 0x00)
# 设置新值
self.bus.write_byte_data(self.address, self.ACCEL_CONFIG, accel_range)
def read_accel_range(self, raw=False):
# 读取加速度计的范围
# 如果raw为真,返回原始值
# 否则根据型号返回值
raw_data = self.bus.read_byte_data(self.address, self.ACCEL_CONFIG)
if raw is True:
return raw_data
elif raw is False:
if raw_data == self.ACCEL_RANGE_2G:
return 2
elif raw_data == self.ACCEL_RANGE_4G:
return 4
elif raw_data == self.ACCEL_RANGE_8G:
return 8
elif raw_data == self.ACCEL_RANGE_16G:
return 16
else:
return -1
def get_accel_data(self, g=False):
# 获取加速度计中的数据
# 如果g为真,返回g中的数据,否则以 m /s^2 返回
x = self.read_i2c_word(self.ACCEL_XOUT0)
y = self.read_i2c_word(self.ACCEL_YOUT0)
z = self.read_i2c_word(self.ACCEL_ZOUT0)
accel_scale_modifier = None
accel_range = self.read_accel_range(True)
if accel_range == self.ACCEL_RANGE_2G:
accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_2G
elif accel_range == self.ACCEL_RANGE_4G:
accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_4G
elif accel_range == self.ACCEL_RANGE_8G:
accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_8G
elif accel_range == self.ACCEL_RANGE_16G:
accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_16G
else:
print("Unkown range - accel_scale_modifier set to self.ACCEL_SCALE_MODIFIER_2G")
accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_2G
x = x / accel_scale_modifier
y = y / accel_scale_modifier
z = z / accel_scale_modifier
if g is True:
return {'x': x, 'y': y, 'z': z}
elif g is False:
x = x * self.GRAVITIY_MS2
y = y * self.GRAVITIY_MS2
z = z * self.GRAVITIY_MS2
return {'x': x, 'y': y, 'z': z}
def set_gyro_range(self, gyro_range):
# 设置陀螺仪范围
# 先设置为0
self.bus.write_byte_data(self.address, self.GYRO_CONFIG, 0x00)
# 设置新值
self.bus.write_byte_data(self.address, self.GYRO_CONFIG, gyro_range)
def read_gyro_range(self, raw=False):
# 读取陀螺仪范围
# 如果raw为真,返回原始值
# 如果raw为假,根据型号返回值
raw_data = self.bus.read_byte_data(self.address, self.GYRO_CONFIG)
if raw is True:
return raw_data
elif raw is False:
if raw_data == self.GYRO_RANGE_250DEG:
return 250
elif raw_data == self.GYRO_RANGE_500DEG:
return 500
elif raw_data == self.GYRO_RANGE_1000DEG:
return 1000
elif raw_data == self.GYRO_RANGE_2000DEG:
return 2000
else:
return -1
def get_gyro_data(self):
# 读取陀螺仪中的数据
x = self.read_i2c_word(self.GYRO_XOUT0)
y = self.read_i2c_word(self.GYRO_YOUT0)
z = self.read_i2c_word(self.GYRO_ZOUT0)
gyro_scale_modifier = None
gyro_range = self.read_gyro_range(True)
if gyro_range == self.GYRO_RANGE_250DEG:
gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG
elif gyro_range == self.GYRO_RANGE_500DEG:
gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_500DEG
elif gyro_range == self.GYRO_RANGE_1000DEG:
gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_1000DEG
elif gyro_range == self.GYRO_RANGE_2000DEG:
gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_2000DEG
else:
print("Unkown range - gyro_scale_modifier set to self.GYRO_SCALE_MODIFIER_250DEG")
gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG
x = x / gyro_scale_modifier
y = y / gyro_scale_modifier
z = z / gyro_scale_modifier
return {'x': x, 'y': y, 'z': z}
def get_all_data(self):
# 返回所有可以获得的值
temp = self.get_temp()
accel = self.get_accel_data()
gyro = self.get_gyro_data()
return [accel, gyro, temp]
if __name__ == "__main__":
mpu = GyroscopeDriver(0x68)
print(mpu.get_temp())
accel_data = mpu.get_accel_data()
print "accel_data:"
print(accel_data['x'])
print(accel_data['y'])
print(accel_data['z'])
gyro_data = mpu.get_gyro_data()
print "gyro_data:"
print(gyro_data['x'])
print(gyro_data['y'])
print(gyro_data['z'])
|
[
"# coding=utf-8\r\n# 型号:mpu6050\r\nimport smbus\r\n\r\n\r\nclass GyroscopeDriver:\r\n\r\n # 全局变量\r\n GRAVITIY_MS2 = 9.80665\r\n address = None\r\n bus = None\r\n\r\n # 缩放修饰符\r\n # 加速度计灵敏度\r\n ACCEL_SCALE_MODIFIER_2G = 16384.0\r\n ACCEL_SCALE_MODIFIER_4G = 8192.0\r\n ACCEL_SCALE_MODIFIER_8G = 4096.0\r\n ACCEL_SCALE_MODIFIER_16G = 2048.0\r\n\r\n # 陀螺仪灵敏度\r\n GYRO_SCALE_MODIFIER_250DEG = 131.0\r\n GYRO_SCALE_MODIFIER_500DEG = 65.5\r\n GYRO_SCALE_MODIFIER_1000DEG = 32.8\r\n GYRO_SCALE_MODIFIER_2000DEG = 16.4\r\n\r\n # 预定义范围\r\n ACCEL_RANGE_2G = 0x00\r\n ACCEL_RANGE_4G = 0x08\r\n ACCEL_RANGE_8G = 0x10\r\n ACCEL_RANGE_16G = 0x18\r\n\r\n GYRO_RANGE_250DEG = 0x00\r\n GYRO_RANGE_500DEG = 0x08\r\n GYRO_RANGE_1000DEG = 0x10\r\n GYRO_RANGE_2000DEG = 0x18\r\n\r\n # MPU-6050 寄存器\r\n PWR_MGMT_1 = 0x6B\r\n PWR_MGMT_2 = 0x6C\r\n\r\n ACCEL_XOUT0 = 0x3B\r\n ACCEL_YOUT0 = 0x3D\r\n ACCEL_ZOUT0 = 0x3F\r\n\r\n TEMP_OUT0 = 0x41\r\n\r\n GYRO_XOUT0 = 0x43\r\n GYRO_YOUT0 = 0x45\r\n GYRO_ZOUT0 = 0x47\r\n\r\n ACCEL_CONFIG = 0x1C\r\n GYRO_CONFIG = 0x1B\r\n\r\n def __init__(self, address=0x68, bus=1):\r\n self.address = address\r\n self.bus = smbus.SMBus(bus)\r\n # 唤醒 MPU-6050\r\n self.bus.write_byte_data(self.address, self.PWR_MGMT_1, 0x00)\r\n\r\n # I2C 通信方法\r\n\r\n def read_i2c_word(self, register):\r\n # 读取两个寄存器中的值并合并\r\n high = self.bus.read_byte_data(self.address, register)\r\n low = self.bus.read_byte_data(self.address, register + 1)\r\n\r\n value = (high << 8) + low\r\n\r\n if value >= 0x8000:\r\n return -((65535 - value) + 1)\r\n else:\r\n return value\r\n\r\n # MPU-6050 Methods\r\n\r\n def get_temp(self):\r\n # 读取温度计的值,并以摄氏度返回\r\n raw_temp = self.read_i2c_word(self.TEMP_OUT0)\r\n\r\n # 得到实际温度\r\n actual_temp = (raw_temp / 340.0) + 36.53\r\n\r\n return actual_temp\r\n\r\n def set_accel_range(self, accel_range):\r\n # 设置加速度计的范围\r\n # 先清空\r\n self.bus.write_byte_data(self.address, self.ACCEL_CONFIG, 0x00)\r\n\r\n # 设置新值\r\n self.bus.write_byte_data(self.address, self.ACCEL_CONFIG, accel_range)\r\n\r\n def read_accel_range(self, raw=False):\r\n # 读取加速度计的范围\r\n # 如果raw为真,返回原始值\r\n # 否则根据型号返回值\r\n raw_data = self.bus.read_byte_data(self.address, self.ACCEL_CONFIG)\r\n\r\n if raw is True:\r\n return raw_data\r\n elif raw is False:\r\n if raw_data == self.ACCEL_RANGE_2G:\r\n return 2\r\n elif raw_data == self.ACCEL_RANGE_4G:\r\n return 4\r\n elif raw_data == self.ACCEL_RANGE_8G:\r\n return 8\r\n elif raw_data == self.ACCEL_RANGE_16G:\r\n return 16\r\n else:\r\n return -1\r\n\r\n def get_accel_data(self, g=False):\r\n # 获取加速度计中的数据\r\n # 如果g为真,返回g中的数据,否则以 m /s^2 返回\r\n x = self.read_i2c_word(self.ACCEL_XOUT0)\r\n y = self.read_i2c_word(self.ACCEL_YOUT0)\r\n z = self.read_i2c_word(self.ACCEL_ZOUT0)\r\n\r\n accel_scale_modifier = None\r\n accel_range = self.read_accel_range(True)\r\n\r\n if accel_range == self.ACCEL_RANGE_2G:\r\n accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_2G\r\n elif accel_range == self.ACCEL_RANGE_4G:\r\n accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_4G\r\n elif accel_range == self.ACCEL_RANGE_8G:\r\n accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_8G\r\n elif accel_range == self.ACCEL_RANGE_16G:\r\n accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_16G\r\n else:\r\n print(\"Unkown range - accel_scale_modifier set to self.ACCEL_SCALE_MODIFIER_2G\")\r\n accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_2G\r\n\r\n x = x / accel_scale_modifier\r\n y = y / accel_scale_modifier\r\n z = z / accel_scale_modifier\r\n\r\n if g is True:\r\n return {'x': x, 'y': y, 'z': z}\r\n elif g is False:\r\n x = x * self.GRAVITIY_MS2\r\n y = y * self.GRAVITIY_MS2\r\n z = z * self.GRAVITIY_MS2\r\n return {'x': x, 'y': y, 'z': z}\r\n\r\n def set_gyro_range(self, gyro_range):\r\n # 设置陀螺仪范围\r\n # 先设置为0\r\n self.bus.write_byte_data(self.address, self.GYRO_CONFIG, 0x00)\r\n\r\n # 设置新值\r\n self.bus.write_byte_data(self.address, self.GYRO_CONFIG, gyro_range)\r\n\r\n def read_gyro_range(self, raw=False):\r\n # 读取陀螺仪范围\r\n # 如果raw为真,返回原始值\r\n # 如果raw为假,根据型号返回值\r\n raw_data = self.bus.read_byte_data(self.address, self.GYRO_CONFIG)\r\n\r\n if raw is True:\r\n return raw_data\r\n elif raw is False:\r\n if raw_data == self.GYRO_RANGE_250DEG:\r\n return 250\r\n elif raw_data == self.GYRO_RANGE_500DEG:\r\n return 500\r\n elif raw_data == self.GYRO_RANGE_1000DEG:\r\n return 1000\r\n elif raw_data == self.GYRO_RANGE_2000DEG:\r\n return 2000\r\n else:\r\n return -1\r\n\r\n def get_gyro_data(self):\r\n # 读取陀螺仪中的数据\r\n x = self.read_i2c_word(self.GYRO_XOUT0)\r\n y = self.read_i2c_word(self.GYRO_YOUT0)\r\n z = self.read_i2c_word(self.GYRO_ZOUT0)\r\n\r\n gyro_scale_modifier = None\r\n gyro_range = self.read_gyro_range(True)\r\n\r\n if gyro_range == self.GYRO_RANGE_250DEG:\r\n gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG\r\n elif gyro_range == self.GYRO_RANGE_500DEG:\r\n gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_500DEG\r\n elif gyro_range == self.GYRO_RANGE_1000DEG:\r\n gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_1000DEG\r\n elif gyro_range == self.GYRO_RANGE_2000DEG:\r\n gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_2000DEG\r\n else:\r\n print(\"Unkown range - gyro_scale_modifier set to self.GYRO_SCALE_MODIFIER_250DEG\")\r\n gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG\r\n\r\n x = x / gyro_scale_modifier\r\n y = y / gyro_scale_modifier\r\n z = z / gyro_scale_modifier\r\n\r\n return {'x': x, 'y': y, 'z': z}\r\n\r\n def get_all_data(self):\r\n # 返回所有可以获得的值\r\n temp = self.get_temp()\r\n accel = self.get_accel_data()\r\n gyro = self.get_gyro_data()\r\n\r\n return [accel, gyro, temp]\r\n\r\n\r\nif __name__ == \"__main__\":\r\n mpu = GyroscopeDriver(0x68)\r\n print(mpu.get_temp())\r\n accel_data = mpu.get_accel_data()\r\n print \"accel_data:\"\r\n print(accel_data['x'])\r\n print(accel_data['y'])\r\n print(accel_data['z'])\r\n gyro_data = mpu.get_gyro_data()\r\n print \"gyro_data:\"\r\n print(gyro_data['x'])\r\n print(gyro_data['y'])\r\n print(gyro_data['z'])\r\n"
] | true |
99,088 |
f899a67e5f438ace462f4a4436318920381393ba
|
num = int(input("Enter number \n"))
if(num==0 or num>0 or num<0):
if(num==0):
print(num ,"is ZERO")
elif(num>=1):
print(f"{num} Number is Positive")
else:
print(f"{num} Number is Negative")
|
[
"num = int(input(\"Enter number \\n\"))\r\n\r\nif(num==0 or num>0 or num<0):\r\n if(num==0):\r\n print(num ,\"is ZERO\")\r\n elif(num>=1):\r\n print(f\"{num} Number is Positive\")\r\n else:\r\n print(f\"{num} Number is Negative\") ",
"num = int(input('Enter number \\n'))\nif num == 0 or num > 0 or num < 0:\n if num == 0:\n print(num, 'is ZERO')\n elif num >= 1:\n print(f'{num} Number is Positive')\n else:\n print(f'{num} Number is Negative')\n",
"<assignment token>\nif num == 0 or num > 0 or num < 0:\n if num == 0:\n print(num, 'is ZERO')\n elif num >= 1:\n print(f'{num} Number is Positive')\n else:\n print(f'{num} Number is Negative')\n",
"<assignment token>\n<code token>\n"
] | false |
99,089 |
37c8f8d49e5e8703d458b1e022ba19bcab1fc59b
|
""" The goal of this file is to have all the information on a graph. """
import sys, os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
import numpy as np
import matplotlib.pyplot as plt
import copy
import exceptions
inf = np.inf
#Change this bool according to the situation
non_oriented = False
class Vertice:
"""" All the information of one vertice are contained here. """
def __init__(self, index, coordinates):
""" Entry : index of the vertice in list_of_vertices
and the coordinates of the vertice. """
self.index = index
self.coordinates = np.array([coordinates[0],coordinates[1]])
self._edges_list = [] # no neighbour by default
self.priority = inf # priority by default
self.visited = False # vertice is by default not visited
self.cost_dijkstra = inf # cost is by default inf
self.antecedent = -inf # antecedent not defined before using Dijkstra
#implemente apres passage par panda
self.index_edges_list = []#seulement implemente apres passage par panda
#database
self.id = None #identifiant idfm de la gare
self.gare_name = None #nom de la gare
self.color = None #couleur de la gare
self.is_a_station= True # boolean True, si le noeud est veritablement une gare. False sinon
def get_lines_connected(self):
list_of_line = []
for edge in self._edges_list:
if edge.id not in list_of_line:
list_of_line.append(edge.id)
return list_of_line
@property
def edges_list(self):
""" Returns the list of neighbour. """
return self._edges_list
# We suppose that the index and the coordinates never change.
# The other properties can.
@edges_list.setter
def edges_list(self, edges_list):
""" An element of edges_list is an edge """
for e in edges_list:
exceptions.check_pertinent_edge(self, e)
self._edges_list = edges_list
def neighbours_list(self, list_tuple, id=0):
self._edges_list.clear()
"""interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple """
for tuple in list_tuple:
E = Edge(self, tuple[0], id, tuple[1])
self._edges_list.append(E)
def number_of_neighbours(self):
return len(self._edges_list)
def is_linked(self, other):
"""returns True if there is an edge between self and other"""
for edge in self._edges_list:
if other.index == edge.linked[1].index:
return True
return False
def push_edge(self, edge, coords_verif=False):
if coords_verif:
exceptions.check_pertinent_edge_coords_verif(self, edge)
else:
exceptions.check_pertinent_edge(self, edge)
self._edges_list.append(edge)
"""
def cost_between(self, other):
for edge in self._edges_list:
[vertice, vertice_voisin] = edge.linked
if vertice_voisin == other:
return edge.given_cost"""
def __repr__(self):
return f"Vertice {str(self.index)}"
def __lt__(self, other):
return self.priority < other.priority
class Edge:
def __init__(self, vertice1, vertice2, id, given_cost=0):
self.linked = [vertice1,vertice2]
self.id = id #identifiant de la liaison. ici id=nom de la ligne a laqualle appartient la liaison
self._given_cost = given_cost #cout de deplacement de la liason donne par l'utilisateur ou la base de donnee
#data_base
self.color=None #couleur de la liason
self.connection_with_displayable=None #indice de la liason developpee( trace reel) dans la table de connection connection_table_edge_and_diplayable_edge de la classe graph
self.index=None
def set_given_cost(self,cost):
self._given_cost=cost
#ne pas mettre @property ici, on veut une methode pas un attribut
def euclidian_cost(self):
return np.sqrt(self.square_euclidian_cost())
def square_euclidian_cost(self):
return np.dot(np.transpose(self.linked[0].coordinates-self.linked[1].coordinates),(self.linked[0].coordinates-self.linked[1].coordinates))
def customized_cost1(self):
V_metro = 25.1 / 3.6 #vitesse moyenne en km/h /3.6 -> vitesse moyenne en m/s
V_train = 49.6 / 3.6
V_tram = 18 / 3.6
V_pieton = 4 / 3.6
if self.id in ["A","B","C","D","E","H","I","J","K","L","M","N","P","R","U","TER","GL"]:
return self._given_cost/V_train
if self.id in [str(i) for i in range(1,15)]+["ORL","CDG","3b","7b"]:
return self._given_cost/V_metro
if self.id in ["T"+str(i) for i in range(1,12)]+["T3A","T3B","FUN"]:
return self._given_cost/V_tram
if self.id in ["RER Walk"]:
return self._given_cost/V_pieton
raise ValueError(" Dans customized_cost1 " +self.id+" non pris en compte dans le calcul de distance")
def __eq__(self,other):
"""2 edges are equal iff same cordinates and same id """
boul0 = self.linked[0].coordinates[0]==other.linked[0].coordinates[0] and self.linked[0].coordinates[1]==other.linked[0].coordinates[1]
boul1 = self.linked[1].coordinates[0]==other.linked[1].coordinates[0] and self.linked[1].coordinates[1]==other.linked[1].coordinates[1]
boulid = self.id==other.id
return boul0 and boul1 and boulid
def __ne__(self,other):
"""2 edges are not equal iff they are not equal :) """
return (self==other)==False
#ne pas mettre @property ici, on veut une methode pas un attribut
def given_cost(self):
return self._given_cost
def __repr__(self):
return f"Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!"
class Graph:
""" All the information of a graph are contained here. """
def __init__(self,list_of_vertices):
""" Entry : the list of vertices. """
self.list_of_vertices = list_of_vertices
self.number_of_vertices = len(list_of_vertices)
self.connection_table_edge_and_diplayable_edge=[]
self.list_of_edges=[]
self.number_of_disp_edges=0
self.number_of_edges=0
def push_diplayable_edge(self,bidim_array):
self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy(bidim_array))
self.number_of_disp_edges+=1
def push_edge(self,e):
self.number_of_edges+=1
self.list_of_edges.append(e)
def push_edge_without_doublons(self, e):
if e not in self.list_of_edges:
self.number_of_edges+=1
self.list_of_edges.append(e)
def push_vertice(self,vertice):
self.list_of_vertices.append(vertice)
self.number_of_vertices += 1
def push_vertice_without_doublons(self, vertice):
bool,index = self.is_vertice_in_graph_based_on_xy_with_tolerance(vertice,10**(-8))
#bool,index = self.is_vertice_in_graph_based_on_xy(vertice)
if bool == False:
self.push_vertice(vertice)
else:
vertice.coordinates=self.list_of_vertices[index].coordinates
for edge in vertice.edges_list:
if edge not in self.list_of_vertices[index].edges_list:
self.list_of_vertices[index].push_edge(edge,True)
def is_vertice_in_graph_based_on_xy(self,vertice):
for i in range(self.number_of_vertices):
v = self.list_of_vertices[i]
if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1] == vertice.coordinates[1]:
return True,i
return False,None
def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):
for i in range(self.number_of_vertices):
v = self.list_of_vertices[i]
if ((v.coordinates[0] - vertice.coordinates[0])**2) + ((v.coordinates[1] - vertice.coordinates[1])**2) < epsilon:
return True, i
return False, None
def __getitem__(self, key):#implement instance[key]
if key >= 0 and key < self.number_of_vertices:
return self.list_of_vertices[key]
else :
raise IndexError
def laplace_matrix(self):
""" Returns the laplace matrix. """
n = self.number_of_vertices
laplace_matrix = np.zeros((n, n))
for i in range(n):
laplace_matrix[i][i] = 1
vertice = self.list_of_vertices[i]
for edge in vertice.edges_list:
laplace_matrix[i][edge.linked[1].index] = 1
return laplace_matrix
def A_matrix(self,type_cost=Edge.given_cost):
""" Returns the laplace matrix. """
n = self.number_of_vertices
A_matrix = np.zeros((n, n))
for i in range(n):
vertice = self.list_of_vertices[i]
for edge in vertice.edges_list:
cost = type_cost(edge)
A_matrix[i][edge.linked[1].index] = cost
A_matrix[edge.linked[1].index][i] = cost
return A_matrix
def pairs_of_vertices(self):
"""Returns the pairs of connected vertices.
Beware ! There might be non-connected vertices in the graph. """
pairs_of_vertices = []
for vertice in self.list_of_vertices:
for edge in vertice.edges_list:
if non_oriented:
if (vertice, edge.linked[1]) and (edge.linked[1], vertice) not in pairs_of_vertices:
pairs_of_vertices.append((vertice, edge.linked[1]))
if not non_oriented:
if (vertice, edge.linked[1]) not in pairs_of_vertices:
pairs_of_vertices.append((vertice, edge.linked[1]))
return pairs_of_vertices
def number_of_edges(self):
a=self.pairs_of_vertices()
assert self.number_of_edges==len(a), "problem in Graph.pairs_of_vertices"
return self.number_of_edges
def search_index_by_coordinates(self,coord):
"""search the index of vertice at coordinates: """
for i in range(len(self.list_of_vertices)):
if self[i].coordinates[0]==coord[0] and self[i].coordinates[1]==coord[1]:
return i
def set_right_edges(self):
"""verify that the graph is coherent """
for v in self:
for e in v.edges_list:
e.linked[0]=v
e.linked[1]=self[self.search_index_by_coordinates(e.linked[1].coordinates)]
for e in self.list_of_edges:
e.linked[0]=self[self.search_index_by_coordinates(e.linked[0].coordinates)]
e.linked[1]=self[self.search_index_by_coordinates(e.linked[1].coordinates)]
def plot(self):
plt.clf()
for v in self._list_of_vertices:
c = f"#{v.color}"
plt.scatter(v.coordinates[0], v.coordinates[1], color=c)
for e in v.edges_list:
c = f"#{e.color}"
x = e.linked[0].coordinates[0]
y = e.linked[0].coordinates[1]
dx = e.linked[1].coordinates[0] - x
dy = e.linked[1].coordinates[1] - y
plt.plot([x,x+dx], [y,y+dy], color=c)
# plt.arrow(x,y,dx,dy)
plt.axis = 'off'
plt.show()
def plot_dev(self):
plt.clf()
for v in self._list_of_vertices:
c = f"#{v.color}"
plt.scatter(v.coordinates[0], v.coordinates[1], color=c)
for e in v.edges_list:
c = f"#{e.color}"
for i in range(len(self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable])-1):
x = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i][0]
y = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i][1]
dx = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i+1][0]-x
dy = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i+1][1]-y
plt.plot([x,x+dx], [y,y+dy], color=c)
plt.axis = 'off'
plt.show()
|
[
"\"\"\" The goal of this file is to have all the information on a graph. \"\"\"\nimport sys, os, inspect\ncurrentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))\nparentdir = os.path.dirname(currentdir)\nsys.path.insert(0, parentdir)\n\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport copy\nimport exceptions\ninf = np.inf\n\n#Change this bool according to the situation\nnon_oriented = False\n\nclass Vertice:\n \"\"\"\" All the information of one vertice are contained here. \"\"\"\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0],coordinates[1]])\n self._edges_list = [] # no neighbour by default\n self.priority = inf # priority by default\n self.visited = False # vertice is by default not visited\n self.cost_dijkstra = inf # cost is by default inf\n self.antecedent = -inf # antecedent not defined before using Dijkstra\n #implemente apres passage par panda\n self.index_edges_list = []#seulement implemente apres passage par panda\n\n\n #database\n self.id = None #identifiant idfm de la gare\n self.gare_name = None #nom de la gare\n self.color = None #couleur de la gare\n self.is_a_station= True # boolean True, si le noeud est veritablement une gare. False sinon\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n # We suppose that the index and the coordinates never change.\n # The other properties can.\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n\n def neighbours_list(self, list_tuple, id=0):\n self._edges_list.clear()\n \"\"\"interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple \"\"\"\n for tuple in list_tuple:\n E = Edge(self, tuple[0], id, tuple[1])\n self._edges_list.append(E)\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n\n \"\"\"\n def cost_between(self, other):\n for edge in self._edges_list:\n [vertice, vertice_voisin] = edge.linked\n if vertice_voisin == other:\n return edge.given_cost\"\"\"\n\n def __repr__(self):\n return f\"Vertice {str(self.index)}\"\n\n def __lt__(self, other):\n return self.priority < other.priority\n\nclass Edge:\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1,vertice2]\n self.id = id #identifiant de la liaison. ici id=nom de la ligne a laqualle appartient la liaison\n self._given_cost = given_cost #cout de deplacement de la liason donne par l'utilisateur ou la base de donnee\n #data_base\n self.color=None #couleur de la liason\n self.connection_with_displayable=None #indice de la liason developpee( trace reel) dans la table de connection connection_table_edge_and_diplayable_edge de la classe graph\n self.index=None\n def set_given_cost(self,cost):\n self._given_cost=cost\n #ne pas mettre @property ici, on veut une methode pas un attribut\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates-self.linked[1].coordinates),(self.linked[0].coordinates-self.linked[1].coordinates))\n def customized_cost1(self):\n V_metro = 25.1 / 3.6 #vitesse moyenne en km/h /3.6 -> vitesse moyenne en m/s\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in [\"A\",\"B\",\"C\",\"D\",\"E\",\"H\",\"I\",\"J\",\"K\",\"L\",\"M\",\"N\",\"P\",\"R\",\"U\",\"TER\",\"GL\"]:\n return self._given_cost/V_train\n if self.id in [str(i) for i in range(1,15)]+[\"ORL\",\"CDG\",\"3b\",\"7b\"]:\n return self._given_cost/V_metro\n if self.id in [\"T\"+str(i) for i in range(1,12)]+[\"T3A\",\"T3B\",\"FUN\"]:\n return self._given_cost/V_tram\n if self.id in [\"RER Walk\"]:\n return self._given_cost/V_pieton\n raise ValueError(\" Dans customized_cost1 \" +self.id+\" non pris en compte dans le calcul de distance\")\n\n def __eq__(self,other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0]==other.linked[0].coordinates[0] and self.linked[0].coordinates[1]==other.linked[0].coordinates[1]\n boul1 = self.linked[1].coordinates[0]==other.linked[1].coordinates[0] and self.linked[1].coordinates[1]==other.linked[1].coordinates[1]\n boulid = self.id==other.id\n return boul0 and boul1 and boulid\n def __ne__(self,other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self==other)==False\n #ne pas mettre @property ici, on veut une methode pas un attribut\n def given_cost(self):\n return self._given_cost\n def __repr__(self):\n return f\"Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!\"\n\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n def __init__(self,list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge=[]\n self.list_of_edges=[]\n self.number_of_disp_edges=0\n self.number_of_edges=0\n\n def push_diplayable_edge(self,bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy(bidim_array))\n self.number_of_disp_edges+=1\n def push_edge(self,e):\n self.number_of_edges+=1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges+=1\n self.list_of_edges.append(e)\n\n\n def push_vertice(self,vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool,index = self.is_vertice_in_graph_based_on_xy_with_tolerance(vertice,10**(-8))\n #bool,index = self.is_vertice_in_graph_based_on_xy(vertice)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates=self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge,True)\n\n\n def is_vertice_in_graph_based_on_xy(self,vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1] == vertice.coordinates[1]:\n return True,i\n return False,None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if ((v.coordinates[0] - vertice.coordinates[0])**2) + ((v.coordinates[1] - vertice.coordinates[1])**2) < epsilon:\n return True, i\n return False, None\n\n\n def __getitem__(self, key):#implement instance[key]\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else :\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self,type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a=self.pairs_of_vertices()\n assert self.number_of_edges==len(a), \"problem in Graph.pairs_of_vertices\"\n return self.number_of_edges\n\n def search_index_by_coordinates(self,coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0]==coord[0] and self[i].coordinates[1]==coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0]=v\n e.linked[1]=self[self.search_index_by_coordinates(e.linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0]=self[self.search_index_by_coordinates(e.linked[0].coordinates)]\n e.linked[1]=self[self.search_index_by_coordinates(e.linked[1].coordinates)]\n\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f\"#{v.color}\"\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f\"#{e.color}\"\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x,x+dx], [y,y+dy], color=c)\n # plt.arrow(x,y,dx,dy)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f\"#{v.color}\"\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f\"#{e.color}\"\n for i in range(len(self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable])-1):\n x = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i+1][0]-x\n dy = self.connection_table_edge_and_diplayable_edge[e.connection_with_displayable][i+1][1]-y\n plt.plot([x,x+dx], [y,y+dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\nimport sys, os, inspect\ncurrentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.\n currentframe())))\nparentdir = os.path.dirname(currentdir)\nsys.path.insert(0, parentdir)\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport copy\nimport exceptions\ninf = np.inf\nnon_oriented = False\n\n\nclass Vertice:\n \"\"\"\" All the information of one vertice are contained here. \"\"\"\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n\n def neighbours_list(self, list_tuple, id=0):\n self._edges_list.clear()\n \"\"\"interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple \"\"\"\n for tuple in list_tuple:\n E = Edge(self, tuple[0], id, tuple[1])\n self._edges_list.append(E)\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n \"\"\"\n def cost_between(self, other):\n for edge in self._edges_list:\n [vertice, vertice_voisin] = edge.linked\n if vertice_voisin == other:\n return edge.given_cost\"\"\"\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n\n def __lt__(self, other):\n return self.priority < other.priority\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\ncurrentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.\n currentframe())))\nparentdir = os.path.dirname(currentdir)\nsys.path.insert(0, parentdir)\n<import token>\ninf = np.inf\nnon_oriented = False\n\n\nclass Vertice:\n \"\"\"\" All the information of one vertice are contained here. \"\"\"\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n\n def neighbours_list(self, list_tuple, id=0):\n self._edges_list.clear()\n \"\"\"interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple \"\"\"\n for tuple in list_tuple:\n E = Edge(self, tuple[0], id, tuple[1])\n self._edges_list.append(E)\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n \"\"\"\n def cost_between(self, other):\n for edge in self._edges_list:\n [vertice, vertice_voisin] = edge.linked\n if vertice_voisin == other:\n return edge.given_cost\"\"\"\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n\n def __lt__(self, other):\n return self.priority < other.priority\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\nsys.path.insert(0, parentdir)\n<import token>\n<assignment token>\n\n\nclass Vertice:\n \"\"\"\" All the information of one vertice are contained here. \"\"\"\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n\n def neighbours_list(self, list_tuple, id=0):\n self._edges_list.clear()\n \"\"\"interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple \"\"\"\n for tuple in list_tuple:\n E = Edge(self, tuple[0], id, tuple[1])\n self._edges_list.append(E)\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n \"\"\"\n def cost_between(self, other):\n for edge in self._edges_list:\n [vertice, vertice_voisin] = edge.linked\n if vertice_voisin == other:\n return edge.given_cost\"\"\"\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n\n def __lt__(self, other):\n return self.priority < other.priority\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n \"\"\"\" All the information of one vertice are contained here. \"\"\"\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n\n def neighbours_list(self, list_tuple, id=0):\n self._edges_list.clear()\n \"\"\"interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple \"\"\"\n for tuple in list_tuple:\n E = Edge(self, tuple[0], id, tuple[1])\n self._edges_list.append(E)\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n \"\"\"\n def cost_between(self, other):\n for edge in self._edges_list:\n [vertice, vertice_voisin] = edge.linked\n if vertice_voisin == other:\n return edge.given_cost\"\"\"\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n\n def __lt__(self, other):\n return self.priority < other.priority\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n\n def neighbours_list(self, list_tuple, id=0):\n self._edges_list.clear()\n \"\"\"interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple \"\"\"\n for tuple in list_tuple:\n E = Edge(self, tuple[0], id, tuple[1])\n self._edges_list.append(E)\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n\n def __lt__(self, other):\n return self.priority < other.priority\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n\n def neighbours_list(self, list_tuple, id=0):\n self._edges_list.clear()\n \"\"\"interface with old constructor , tuple=(neighbour_vertice,cost) is an element of list_tuple \"\"\"\n for tuple in list_tuple:\n E = Edge(self, tuple[0], id, tuple[1])\n self._edges_list.append(E)\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n\n def is_linked(self, other):\n \"\"\"returns True if there is an edge between self and other\"\"\"\n for edge in self._edges_list:\n if other.index == edge.linked[1].index:\n return True\n return False\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n\n def get_lines_connected(self):\n list_of_line = []\n for edge in self._edges_list:\n if edge.id not in list_of_line:\n list_of_line.append(edge.id)\n return list_of_line\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n <function token>\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n <function token>\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n <function token>\n\n def push_edge(self, edge, coords_verif=False):\n if coords_verif:\n exceptions.check_pertinent_edge_coords_verif(self, edge)\n else:\n exceptions.check_pertinent_edge(self, edge)\n self._edges_list.append(edge)\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n\n def __init__(self, index, coordinates):\n \"\"\" Entry : index of the vertice in list_of_vertices\n and the coordinates of the vertice. \"\"\"\n self.index = index\n self.coordinates = np.array([coordinates[0], coordinates[1]])\n self._edges_list = []\n self.priority = inf\n self.visited = False\n self.cost_dijkstra = inf\n self.antecedent = -inf\n self.index_edges_list = []\n self.id = None\n self.gare_name = None\n self.color = None\n self.is_a_station = True\n <function token>\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n <function token>\n <function token>\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n <function token>\n <function token>\n\n @property\n def edges_list(self):\n \"\"\" Returns the list of neighbour. \"\"\"\n return self._edges_list\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n <function token>\n <function token>\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n <function token>\n <function token>\n <docstring token>\n\n def __repr__(self):\n return f'Vertice {str(self.index)}'\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n @edges_list.setter\n def edges_list(self, edges_list):\n \"\"\" An element of edges_list is an edge \"\"\"\n for e in edges_list:\n exceptions.check_pertinent_edge(self, e)\n self._edges_list = edges_list\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n <function token>\n <function token>\n <docstring token>\n <function token>\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def number_of_neighbours(self):\n return len(self._edges_list)\n <function token>\n <function token>\n <docstring token>\n <function token>\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n\n\nclass Vertice:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <docstring token>\n <function token>\n <function token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n\n def given_cost(self):\n return self._given_cost\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n\n def square_euclidian_cost(self):\n return np.dot(np.transpose(self.linked[0].coordinates - self.linked\n [1].coordinates), self.linked[0].coordinates - self.linked[1].\n coordinates)\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n <function token>\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n <function token>\n\n def customized_cost1(self):\n V_metro = 25.1 / 3.6\n V_train = 49.6 / 3.6\n V_tram = 18 / 3.6\n V_pieton = 4 / 3.6\n if self.id in ['A', 'B', 'C', 'D', 'E', 'H', 'I', 'J', 'K', 'L',\n 'M', 'N', 'P', 'R', 'U', 'TER', 'GL']:\n return self._given_cost / V_train\n if self.id in [str(i) for i in range(1, 15)] + ['ORL', 'CDG', '3b',\n '7b']:\n return self._given_cost / V_metro\n if self.id in [('T' + str(i)) for i in range(1, 12)] + ['T3A',\n 'T3B', 'FUN']:\n return self._given_cost / V_tram\n if self.id in ['RER Walk']:\n return self._given_cost / V_pieton\n raise ValueError(' Dans customized_cost1 ' + self.id +\n ' non pris en compte dans le calcul de distance')\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n <function token>\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n <function token>\n <function token>\n\n def __eq__(self, other):\n \"\"\"2 edges are equal iff same cordinates and same id \"\"\"\n boul0 = self.linked[0].coordinates[0] == other.linked[0].coordinates[0\n ] and self.linked[0].coordinates[1] == other.linked[0].coordinates[\n 1]\n boul1 = self.linked[1].coordinates[0] == other.linked[1].coordinates[0\n ] and self.linked[1].coordinates[1] == other.linked[1].coordinates[\n 1]\n boulid = self.id == other.id\n return boul0 and boul1 and boulid\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n <function token>\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n\n def set_given_cost(self, cost):\n self._given_cost = cost\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n <function token>\n <function token>\n <function token>\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n <function token>\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n <function token>\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n <function token>\n <function token>\n <function token>\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n <function token>\n\n def __repr__(self):\n return (\n f'Edge [{str(self.linked[0].index)}, {str(self.linked[1].index)}] !oriented!'\n )\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n <function token>\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n <function token>\n <function token>\n <function token>\n\n def __ne__(self, other):\n \"\"\"2 edges are not equal iff they are not equal :) \"\"\"\n return (self == other) == False\n <function token>\n <function token>\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n <function token>\n\n def euclidian_cost(self):\n return np.sqrt(self.square_euclidian_cost())\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n\n def __init__(self, vertice1, vertice2, id, given_cost=0):\n self.linked = [vertice1, vertice2]\n self.id = id\n self._given_cost = given_cost\n self.color = None\n self.connection_with_displayable = None\n self.index = None\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n\n\nclass Edge:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n \"\"\" All the information of a graph are contained here. \"\"\"\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n\n def search_index_by_coordinates(self, coord):\n \"\"\"search the index of vertice at coordinates: \"\"\"\n for i in range(len(self.list_of_vertices)):\n if self[i].coordinates[0] == coord[0] and self[i].coordinates[1\n ] == coord[1]:\n return i\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n\n def plot(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n x = e.linked[0].coordinates[0]\n y = e.linked[0].coordinates[1]\n dx = e.linked[1].coordinates[0] - x\n dy = e.linked[1].coordinates[1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n\n def push_edge(self, e):\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n\n def push_vertice(self, vertice):\n self.list_of_vertices.append(vertice)\n self.number_of_vertices += 1\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n\n def push_diplayable_edge(self, bidim_array):\n self.connection_table_edge_and_diplayable_edge.append(copy.deepcopy\n (bidim_array))\n self.number_of_disp_edges += 1\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n\n def __init__(self, list_of_vertices):\n \"\"\" Entry : the list of vertices. \"\"\"\n self.list_of_vertices = list_of_vertices\n self.number_of_vertices = len(list_of_vertices)\n self.connection_table_edge_and_diplayable_edge = []\n self.list_of_edges = []\n self.number_of_disp_edges = 0\n self.number_of_edges = 0\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n\n def pairs_of_vertices(self):\n \"\"\"Returns the pairs of connected vertices.\n Beware ! There might be non-connected vertices in the graph. \"\"\"\n pairs_of_vertices = []\n for vertice in self.list_of_vertices:\n for edge in vertice.edges_list:\n if non_oriented:\n if (vertice, edge.linked[1]) and (edge.linked[1], vertice\n ) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n if not non_oriented:\n if (vertice, edge.linked[1]) not in pairs_of_vertices:\n pairs_of_vertices.append((vertice, edge.linked[1]))\n return pairs_of_vertices\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n\n def __getitem__(self, key):\n if key >= 0 and key < self.number_of_vertices:\n return self.list_of_vertices[key]\n else:\n raise IndexError\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n <function token>\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n\n def A_matrix(self, type_cost=Edge.given_cost):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n A_matrix = np.zeros((n, n))\n for i in range(n):\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n cost = type_cost(edge)\n A_matrix[i][edge.linked[1].index] = cost\n A_matrix[edge.linked[1].index][i] = cost\n return A_matrix\n <function token>\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n\n def push_vertice_without_doublons(self, vertice):\n bool, index = self.is_vertice_in_graph_based_on_xy_with_tolerance(\n vertice, 10 ** -8)\n if bool == False:\n self.push_vertice(vertice)\n else:\n vertice.coordinates = self.list_of_vertices[index].coordinates\n for edge in vertice.edges_list:\n if edge not in self.list_of_vertices[index].edges_list:\n self.list_of_vertices[index].push_edge(edge, True)\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n <function token>\n <function token>\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n <function token>\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n <function token>\n <function token>\n\n def number_of_edges(self):\n a = self.pairs_of_vertices()\n assert self.number_of_edges == len(a\n ), 'problem in Graph.pairs_of_vertices'\n return self.number_of_edges\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n <function token>\n\n def is_vertice_in_graph_based_on_xy(self, vertice):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if v.coordinates[0] == vertice.coordinates[0] and v.coordinates[1\n ] == vertice.coordinates[1]:\n return True, i\n return False, None\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n\n def push_edge_without_doublons(self, e):\n if e not in self.list_of_edges:\n self.number_of_edges += 1\n self.list_of_edges.append(e)\n <function token>\n <function token>\n <function token>\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n\n def plot_dev(self):\n plt.clf()\n for v in self._list_of_vertices:\n c = f'#{v.color}'\n plt.scatter(v.coordinates[0], v.coordinates[1], color=c)\n for e in v.edges_list:\n c = f'#{e.color}'\n for i in range(len(self.\n connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable]) - 1):\n x = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][0]\n y = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i][1]\n dx = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][0] - x\n dy = self.connection_table_edge_and_diplayable_edge[e.\n connection_with_displayable][i + 1][1] - y\n plt.plot([x, x + dx], [y, y + dy], color=c)\n plt.axis = 'off'\n plt.show()\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n <function token>\n <function token>\n <function token>\n <function token>\n\n def set_right_edges(self):\n \"\"\"verify that the graph is coherent \"\"\"\n for v in self:\n for e in v.edges_list:\n e.linked[0] = v\n e.linked[1] = self[self.search_index_by_coordinates(e.\n linked[1].coordinates)]\n for e in self.list_of_edges:\n e.linked[0] = self[self.search_index_by_coordinates(e.linked[0]\n .coordinates)]\n e.linked[1] = self[self.search_index_by_coordinates(e.linked[1]\n .coordinates)]\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n\n def laplace_matrix(self):\n \"\"\" Returns the laplace matrix. \"\"\"\n n = self.number_of_vertices\n laplace_matrix = np.zeros((n, n))\n for i in range(n):\n laplace_matrix[i][i] = 1\n vertice = self.list_of_vertices[i]\n for edge in vertice.edges_list:\n laplace_matrix[i][edge.linked[1].index] = 1\n return laplace_matrix\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n def is_vertice_in_graph_based_on_xy_with_tolerance(self, vertice, epsilon):\n for i in range(self.number_of_vertices):\n v = self.list_of_vertices[i]\n if (v.coordinates[0] - vertice.coordinates[0]) ** 2 + (v.\n coordinates[1] - vertice.coordinates[1]) ** 2 < epsilon:\n return True, i\n return False, None\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n\n\nclass Graph:\n <docstring token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n",
"<docstring token>\n<import token>\n<assignment token>\n<code token>\n<import token>\n<assignment token>\n<class token>\n<class token>\n<class token>\n"
] | false |
99,090 |
69e0ad377442a0a0af58bec5da75e17d657914d3
|
# Create your views here.
from django.http import HttpResponse
from django.shortcuts import render_to_response
from appvimeo.models import search_posts
from django.template.loader import get_template
from django.template import Context
def home(request):
if 'q' in request.GET and request.GET['q']:
message = 'You searched for: %r' % request.GET['q']
else:
messge = 'you submitted an empty form'
if 'q' in request.GET and request.GET['q']:
q = request.GET['q']
entry = search_posts.objects.filter(name__icontains=q)
return render_to_response('searchvimeo.html',locals())
|
[
"# Create your views here.\nfrom django.http import HttpResponse\nfrom django.shortcuts import render_to_response\nfrom appvimeo.models import search_posts\nfrom django.template.loader import get_template\nfrom django.template import Context\n\n\ndef home(request):\n\n \n if 'q' in request.GET and request.GET['q']:\n message = 'You searched for: %r' % request.GET['q']\n else:\n messge = 'you submitted an empty form' \n if 'q' in request.GET and request.GET['q']:\n q = request.GET['q']\n entry = search_posts.objects.filter(name__icontains=q)\n\n return render_to_response('searchvimeo.html',locals())\n\n\n",
"from django.http import HttpResponse\nfrom django.shortcuts import render_to_response\nfrom appvimeo.models import search_posts\nfrom django.template.loader import get_template\nfrom django.template import Context\n\n\ndef home(request):\n if 'q' in request.GET and request.GET['q']:\n message = 'You searched for: %r' % request.GET['q']\n else:\n messge = 'you submitted an empty form'\n if 'q' in request.GET and request.GET['q']:\n q = request.GET['q']\n entry = search_posts.objects.filter(name__icontains=q)\n return render_to_response('searchvimeo.html', locals())\n",
"<import token>\n\n\ndef home(request):\n if 'q' in request.GET and request.GET['q']:\n message = 'You searched for: %r' % request.GET['q']\n else:\n messge = 'you submitted an empty form'\n if 'q' in request.GET and request.GET['q']:\n q = request.GET['q']\n entry = search_posts.objects.filter(name__icontains=q)\n return render_to_response('searchvimeo.html', locals())\n",
"<import token>\n<function token>\n"
] | false |
99,091 |
c7065857ac98cb80fdd8b6b1aa03d6e566e89d0c
|
version https://git-lfs.github.com/spec/v1
oid sha256:f105c568cf2a42d027eb8c97378cfe8f7c33abab872f300055ef6fbb5290f2cb
size 1666
|
[
"version https://git-lfs.github.com/spec/v1\noid sha256:f105c568cf2a42d027eb8c97378cfe8f7c33abab872f300055ef6fbb5290f2cb\nsize 1666\n"
] | true |
99,092 |
d1c4cc16c356a07c259f3c3e5cad0c16869eeb33
|
import glob
import pygame
from pygame.locals import *
import sys
import random
WIDTH = 1920
HEIGHT = 1080
FPS = 30
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
BLUE = (0, 0, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
YELLOW = (255, 216, 0)
text = (207, 95, 63)
BACKGROUND = pygame.image.load("assets/img/bg2.png")
INTRO = pygame.image.load("assets/img/start.png")
WIN = pygame.image.load("assets/img/win.png")
screen = pygame.display.set_mode((WIDTH, HEIGHT), FULLSCREEN)
class NoMiniGame:
def __init__(self):
self.x = 0
self.y = 0
self.close_x = 0
self.close_y = 0
self.close = pygame.image.load("assets/img/close.png")
self.img = None
self.path = path = "assets/img/nominigame"
self.load_image()
self.random_move()
self.random_move_close()
def load_image(self):
imgs = glob.glob(self.path + "/*.png")
# print(imgs)
self.img = pygame.image.load(random.choice(imgs))
def draw(self, screen):
screen.blit(self.img, self.get_rect())
screen.blit(self.close, self.get_rect_close())
def move(self, x, y):
self.x = x
self.y = y
def random_move(self):
rect = self.get_rect()
x_offset = WIDTH - rect.width
y_offset = HEIGHT - rect.height
if x_offset != 0:
self.x = random.randrange(0, x_offset)
if y_offset != 0:
self.y = random.randrange(0, y_offset)
def get_rect(self):
image = self.img
rect = image.get_rect()
rect.x = self.x
rect.y = self.y
return rect
def get_rect_close(self):
image = self.close
rect = image.get_rect()
rect.x = self.close_x
rect.y = self.close_y
return rect
def random_move_close(self):
rect = self.get_rect()
close_rect = self.get_rect_close()
self.close_x = random.randrange(self.x, self.x + rect.width - close_rect.width)
self.close_y = random.randrange(self.y, self.y + rect.height - close_rect.height)
def check_close(self, x, y):
rect = self.get_rect_close()
return (self.close_x < x < self.close_x + rect.width) and (self.close_y < y < self.close_y + rect.height)
def check_ad_click(self, x, y):
rect = self.get_rect()
return (self.x < x < self.x + rect.width) and (self.close_y < y < self.close_y + rect.height)
ads = [NoMiniGame()]
ad_streak = 0
ad_countdown_min = 2
ad_countdown_max = 5
ad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)
count = 0
score = 10000
intro = True
win = False
if __name__ == "__main__":
pygame.mixer.init(frequency=22050, size=-16, channels=2, buffer=512)
pygame.init()
clock = pygame.time.Clock()
mainLoop = True
font = pygame.font.SysFont("monospace", 50)
screen.blit(BACKGROUND, BACKGROUND.get_rect())
while mainLoop:
pygame.event.pump()
clock.tick(FPS)
keys = pygame.key.get_pressed()
mx, my = pygame.mouse.get_pos()
if keys[pygame.K_ESCAPE] or keys[pygame.K_q]:
pygame.quit()
sys.exit(0)
ev = pygame.event.get()
if intro:
ads = [NoMiniGame()]
for event in ev:
if event.type == pygame.MOUSEBUTTONUP:
intro = False
screen.blit(INTRO, (0, 0, HEIGHT, WIDTH))
else:
if win and len(ads) == 0:
for event in ev:
if event.type == pygame.MOUSEBUTTONUP:
intro = True
win = False
screen.blit(WIN, (0, 0, HEIGHT, WIDTH))
label = font.render("SCORE = " + str(score), 1, text)
screen.blit(label, (0, 0))
else:
screen.blit(BACKGROUND, (0, 0, HEIGHT, WIDTH))
count += 1
if count >= ad_countdown * (FPS / 3):
ads.append(NoMiniGame())
ad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)
score -= 100
if len(ads) >= 20:
intro = True
count = 0
# proceed events
for event in ev:
if event.type == pygame.MOUSEBUTTONUP:
for ad in ads:
if ad.check_close(mx, my):
ads.remove(ad)
break
elif ad.check_ad_click(mx, my):
ads.append(NoMiniGame())
ad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)
score -= 100
if len(ads) >= 20:
intro = True
if len(ads) <= 1:
ad_streak += 1
if ad_streak > 10 * FPS:
win = True
else:
ad_streak = 0
for ad in ads:
ad.draw(screen)
pygame.display.flip()
pygame.quit()
|
[
"\nimport glob\n\nimport pygame\nfrom pygame.locals import *\nimport sys\nimport random\n\nWIDTH = 1920\nHEIGHT = 1080\nFPS = 30\n\nBLACK = (0, 0, 0)\nWHITE = (255, 255, 255)\nBLUE = (0, 0, 255)\nGREEN = (0, 255, 0)\nRED = (255, 0, 0)\nYELLOW = (255, 216, 0)\ntext = (207, 95, 63)\n\nBACKGROUND = pygame.image.load(\"assets/img/bg2.png\")\n\nINTRO = pygame.image.load(\"assets/img/start.png\")\n\nWIN = pygame.image.load(\"assets/img/win.png\")\n\nscreen = pygame.display.set_mode((WIDTH, HEIGHT), FULLSCREEN)\n\n\nclass NoMiniGame:\n def __init__(self):\n self.x = 0\n self.y = 0\n\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load(\"assets/img/close.png\")\n\n self.img = None\n self.path = path = \"assets/img/nominigame\"\n self.load_image()\n self.random_move()\n self.random_move_close()\n\n def load_image(self):\n imgs = glob.glob(self.path + \"/*.png\")\n # print(imgs)\n self.img = pygame.image.load(random.choice(imgs))\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n\n def random_move(self):\n rect = self.get_rect()\n x_offset = WIDTH - rect.width\n y_offset = HEIGHT - rect.height\n if x_offset != 0:\n self.x = random.randrange(0, x_offset)\n if y_offset != 0:\n self.y = random.randrange(0, y_offset)\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n\n return rect\n\n def random_move_close(self):\n rect = self.get_rect()\n close_rect = self.get_rect_close()\n self.close_x = random.randrange(self.x, self.x + rect.width - close_rect.width)\n self.close_y = random.randrange(self.y, self.y + rect.height - close_rect.height)\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width) and (self.close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width) and (self.close_y < y < self.close_y + rect.height)\n\n\nads = [NoMiniGame()]\nad_streak = 0\nad_countdown_min = 2\nad_countdown_max = 5\nad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)\ncount = 0\n\nscore = 10000\n\nintro = True\nwin = False\n\nif __name__ == \"__main__\":\n pygame.mixer.init(frequency=22050, size=-16, channels=2, buffer=512)\n pygame.init()\n clock = pygame.time.Clock()\n mainLoop = True\n font = pygame.font.SysFont(\"monospace\", 50)\n screen.blit(BACKGROUND, BACKGROUND.get_rect())\n\n while mainLoop:\n pygame.event.pump()\n clock.tick(FPS)\n\n keys = pygame.key.get_pressed()\n mx, my = pygame.mouse.get_pos()\n\n if keys[pygame.K_ESCAPE] or keys[pygame.K_q]:\n pygame.quit()\n sys.exit(0)\n ev = pygame.event.get()\n\n if intro:\n ads = [NoMiniGame()]\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = False\n screen.blit(INTRO, (0, 0, HEIGHT, WIDTH))\n else:\n if win and len(ads) == 0:\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = True\n win = False\n screen.blit(WIN, (0, 0, HEIGHT, WIDTH))\n label = font.render(\"SCORE = \" + str(score), 1, text)\n screen.blit(label, (0, 0))\n\n else:\n screen.blit(BACKGROUND, (0, 0, HEIGHT, WIDTH))\n count += 1\n if count >= ad_countdown * (FPS / 3):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n count = 0\n\n # proceed events\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n for ad in ads:\n if ad.check_close(mx, my):\n ads.remove(ad)\n break\n elif ad.check_ad_click(mx, my):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n if len(ads) <= 1:\n ad_streak += 1\n if ad_streak > 10 * FPS:\n win = True\n else:\n ad_streak = 0\n\n for ad in ads:\n ad.draw(screen)\n\n pygame.display.flip()\n\n pygame.quit()\n",
"import glob\nimport pygame\nfrom pygame.locals import *\nimport sys\nimport random\nWIDTH = 1920\nHEIGHT = 1080\nFPS = 30\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nBLUE = 0, 0, 255\nGREEN = 0, 255, 0\nRED = 255, 0, 0\nYELLOW = 255, 216, 0\ntext = 207, 95, 63\nBACKGROUND = pygame.image.load('assets/img/bg2.png')\nINTRO = pygame.image.load('assets/img/start.png')\nWIN = pygame.image.load('assets/img/win.png')\nscreen = pygame.display.set_mode((WIDTH, HEIGHT), FULLSCREEN)\n\n\nclass NoMiniGame:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load('assets/img/close.png')\n self.img = None\n self.path = path = 'assets/img/nominigame'\n self.load_image()\n self.random_move()\n self.random_move_close()\n\n def load_image(self):\n imgs = glob.glob(self.path + '/*.png')\n self.img = pygame.image.load(random.choice(imgs))\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n\n def random_move(self):\n rect = self.get_rect()\n x_offset = WIDTH - rect.width\n y_offset = HEIGHT - rect.height\n if x_offset != 0:\n self.x = random.randrange(0, x_offset)\n if y_offset != 0:\n self.y = random.randrange(0, y_offset)\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n\n def random_move_close(self):\n rect = self.get_rect()\n close_rect = self.get_rect_close()\n self.close_x = random.randrange(self.x, self.x + rect.width -\n close_rect.width)\n self.close_y = random.randrange(self.y, self.y + rect.height -\n close_rect.height)\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\nads = [NoMiniGame()]\nad_streak = 0\nad_countdown_min = 2\nad_countdown_max = 5\nad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)\ncount = 0\nscore = 10000\nintro = True\nwin = False\nif __name__ == '__main__':\n pygame.mixer.init(frequency=22050, size=-16, channels=2, buffer=512)\n pygame.init()\n clock = pygame.time.Clock()\n mainLoop = True\n font = pygame.font.SysFont('monospace', 50)\n screen.blit(BACKGROUND, BACKGROUND.get_rect())\n while mainLoop:\n pygame.event.pump()\n clock.tick(FPS)\n keys = pygame.key.get_pressed()\n mx, my = pygame.mouse.get_pos()\n if keys[pygame.K_ESCAPE] or keys[pygame.K_q]:\n pygame.quit()\n sys.exit(0)\n ev = pygame.event.get()\n if intro:\n ads = [NoMiniGame()]\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = False\n screen.blit(INTRO, (0, 0, HEIGHT, WIDTH))\n elif win and len(ads) == 0:\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = True\n win = False\n screen.blit(WIN, (0, 0, HEIGHT, WIDTH))\n label = font.render('SCORE = ' + str(score), 1, text)\n screen.blit(label, (0, 0))\n else:\n screen.blit(BACKGROUND, (0, 0, HEIGHT, WIDTH))\n count += 1\n if count >= ad_countdown * (FPS / 3):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min,\n ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n count = 0\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n for ad in ads:\n if ad.check_close(mx, my):\n ads.remove(ad)\n break\n elif ad.check_ad_click(mx, my):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min,\n ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n if len(ads) <= 1:\n ad_streak += 1\n if ad_streak > 10 * FPS:\n win = True\n else:\n ad_streak = 0\n for ad in ads:\n ad.draw(screen)\n pygame.display.flip()\n pygame.quit()\n",
"<import token>\nWIDTH = 1920\nHEIGHT = 1080\nFPS = 30\nBLACK = 0, 0, 0\nWHITE = 255, 255, 255\nBLUE = 0, 0, 255\nGREEN = 0, 255, 0\nRED = 255, 0, 0\nYELLOW = 255, 216, 0\ntext = 207, 95, 63\nBACKGROUND = pygame.image.load('assets/img/bg2.png')\nINTRO = pygame.image.load('assets/img/start.png')\nWIN = pygame.image.load('assets/img/win.png')\nscreen = pygame.display.set_mode((WIDTH, HEIGHT), FULLSCREEN)\n\n\nclass NoMiniGame:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load('assets/img/close.png')\n self.img = None\n self.path = path = 'assets/img/nominigame'\n self.load_image()\n self.random_move()\n self.random_move_close()\n\n def load_image(self):\n imgs = glob.glob(self.path + '/*.png')\n self.img = pygame.image.load(random.choice(imgs))\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n\n def random_move(self):\n rect = self.get_rect()\n x_offset = WIDTH - rect.width\n y_offset = HEIGHT - rect.height\n if x_offset != 0:\n self.x = random.randrange(0, x_offset)\n if y_offset != 0:\n self.y = random.randrange(0, y_offset)\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n\n def random_move_close(self):\n rect = self.get_rect()\n close_rect = self.get_rect_close()\n self.close_x = random.randrange(self.x, self.x + rect.width -\n close_rect.width)\n self.close_y = random.randrange(self.y, self.y + rect.height -\n close_rect.height)\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\nads = [NoMiniGame()]\nad_streak = 0\nad_countdown_min = 2\nad_countdown_max = 5\nad_countdown = random.randrange(ad_countdown_min, ad_countdown_max)\ncount = 0\nscore = 10000\nintro = True\nwin = False\nif __name__ == '__main__':\n pygame.mixer.init(frequency=22050, size=-16, channels=2, buffer=512)\n pygame.init()\n clock = pygame.time.Clock()\n mainLoop = True\n font = pygame.font.SysFont('monospace', 50)\n screen.blit(BACKGROUND, BACKGROUND.get_rect())\n while mainLoop:\n pygame.event.pump()\n clock.tick(FPS)\n keys = pygame.key.get_pressed()\n mx, my = pygame.mouse.get_pos()\n if keys[pygame.K_ESCAPE] or keys[pygame.K_q]:\n pygame.quit()\n sys.exit(0)\n ev = pygame.event.get()\n if intro:\n ads = [NoMiniGame()]\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = False\n screen.blit(INTRO, (0, 0, HEIGHT, WIDTH))\n elif win and len(ads) == 0:\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = True\n win = False\n screen.blit(WIN, (0, 0, HEIGHT, WIDTH))\n label = font.render('SCORE = ' + str(score), 1, text)\n screen.blit(label, (0, 0))\n else:\n screen.blit(BACKGROUND, (0, 0, HEIGHT, WIDTH))\n count += 1\n if count >= ad_countdown * (FPS / 3):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min,\n ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n count = 0\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n for ad in ads:\n if ad.check_close(mx, my):\n ads.remove(ad)\n break\n elif ad.check_ad_click(mx, my):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min,\n ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n if len(ads) <= 1:\n ad_streak += 1\n if ad_streak > 10 * FPS:\n win = True\n else:\n ad_streak = 0\n for ad in ads:\n ad.draw(screen)\n pygame.display.flip()\n pygame.quit()\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load('assets/img/close.png')\n self.img = None\n self.path = path = 'assets/img/nominigame'\n self.load_image()\n self.random_move()\n self.random_move_close()\n\n def load_image(self):\n imgs = glob.glob(self.path + '/*.png')\n self.img = pygame.image.load(random.choice(imgs))\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n\n def random_move(self):\n rect = self.get_rect()\n x_offset = WIDTH - rect.width\n y_offset = HEIGHT - rect.height\n if x_offset != 0:\n self.x = random.randrange(0, x_offset)\n if y_offset != 0:\n self.y = random.randrange(0, y_offset)\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n\n def random_move_close(self):\n rect = self.get_rect()\n close_rect = self.get_rect_close()\n self.close_x = random.randrange(self.x, self.x + rect.width -\n close_rect.width)\n self.close_y = random.randrange(self.y, self.y + rect.height -\n close_rect.height)\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\nif __name__ == '__main__':\n pygame.mixer.init(frequency=22050, size=-16, channels=2, buffer=512)\n pygame.init()\n clock = pygame.time.Clock()\n mainLoop = True\n font = pygame.font.SysFont('monospace', 50)\n screen.blit(BACKGROUND, BACKGROUND.get_rect())\n while mainLoop:\n pygame.event.pump()\n clock.tick(FPS)\n keys = pygame.key.get_pressed()\n mx, my = pygame.mouse.get_pos()\n if keys[pygame.K_ESCAPE] or keys[pygame.K_q]:\n pygame.quit()\n sys.exit(0)\n ev = pygame.event.get()\n if intro:\n ads = [NoMiniGame()]\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = False\n screen.blit(INTRO, (0, 0, HEIGHT, WIDTH))\n elif win and len(ads) == 0:\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n intro = True\n win = False\n screen.blit(WIN, (0, 0, HEIGHT, WIDTH))\n label = font.render('SCORE = ' + str(score), 1, text)\n screen.blit(label, (0, 0))\n else:\n screen.blit(BACKGROUND, (0, 0, HEIGHT, WIDTH))\n count += 1\n if count >= ad_countdown * (FPS / 3):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min,\n ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n count = 0\n for event in ev:\n if event.type == pygame.MOUSEBUTTONUP:\n for ad in ads:\n if ad.check_close(mx, my):\n ads.remove(ad)\n break\n elif ad.check_ad_click(mx, my):\n ads.append(NoMiniGame())\n ad_countdown = random.randrange(ad_countdown_min,\n ad_countdown_max)\n score -= 100\n if len(ads) >= 20:\n intro = True\n if len(ads) <= 1:\n ad_streak += 1\n if ad_streak > 10 * FPS:\n win = True\n else:\n ad_streak = 0\n for ad in ads:\n ad.draw(screen)\n pygame.display.flip()\n pygame.quit()\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load('assets/img/close.png')\n self.img = None\n self.path = path = 'assets/img/nominigame'\n self.load_image()\n self.random_move()\n self.random_move_close()\n\n def load_image(self):\n imgs = glob.glob(self.path + '/*.png')\n self.img = pygame.image.load(random.choice(imgs))\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n\n def random_move(self):\n rect = self.get_rect()\n x_offset = WIDTH - rect.width\n y_offset = HEIGHT - rect.height\n if x_offset != 0:\n self.x = random.randrange(0, x_offset)\n if y_offset != 0:\n self.y = random.randrange(0, y_offset)\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n\n def random_move_close(self):\n rect = self.get_rect()\n close_rect = self.get_rect_close()\n self.close_x = random.randrange(self.x, self.x + rect.width -\n close_rect.width)\n self.close_y = random.randrange(self.y, self.y + rect.height -\n close_rect.height)\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load('assets/img/close.png')\n self.img = None\n self.path = path = 'assets/img/nominigame'\n self.load_image()\n self.random_move()\n self.random_move_close()\n\n def load_image(self):\n imgs = glob.glob(self.path + '/*.png')\n self.img = pygame.image.load(random.choice(imgs))\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n <function token>\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n\n def random_move_close(self):\n rect = self.get_rect()\n close_rect = self.get_rect_close()\n self.close_x = random.randrange(self.x, self.x + rect.width -\n close_rect.width)\n self.close_y = random.randrange(self.y, self.y + rect.height -\n close_rect.height)\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load('assets/img/close.png')\n self.img = None\n self.path = path = 'assets/img/nominigame'\n self.load_image()\n self.random_move()\n self.random_move_close()\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n <function token>\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n\n def random_move_close(self):\n rect = self.get_rect()\n close_rect = self.get_rect_close()\n self.close_x = random.randrange(self.x, self.x + rect.width -\n close_rect.width)\n self.close_y = random.randrange(self.y, self.y + rect.height -\n close_rect.height)\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n\n def __init__(self):\n self.x = 0\n self.y = 0\n self.close_x = 0\n self.close_y = 0\n self.close = pygame.image.load('assets/img/close.png')\n self.img = None\n self.path = path = 'assets/img/nominigame'\n self.load_image()\n self.random_move()\n self.random_move_close()\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n <function token>\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n <function token>\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n <function token>\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n <function token>\n\n def get_rect(self):\n image = self.img\n rect = image.get_rect()\n rect.x = self.x\n rect.y = self.y\n return rect\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n <function token>\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n <function token>\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n <function token>\n <function token>\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n <function token>\n\n def check_close(self, x, y):\n rect = self.get_rect_close()\n return (self.close_x < x < self.close_x + rect.width and self.\n close_y < y < self.close_y + rect.height)\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n <function token>\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n <function token>\n <function token>\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n <function token>\n <function token>\n\n def check_ad_click(self, x, y):\n rect = self.get_rect()\n return (self.x < x < self.x + rect.width and self.close_y < y < \n self.close_y + rect.height)\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n <function token>\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n\n def move(self, x, y):\n self.x = x\n self.y = y\n <function token>\n <function token>\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n <function token>\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n <function token>\n <function token>\n <function token>\n\n def get_rect_close(self):\n image = self.close\n rect = image.get_rect()\n rect.x = self.close_x\n rect.y = self.close_y\n return rect\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n <function token>\n <function token>\n\n def draw(self, screen):\n screen.blit(self.img, self.get_rect())\n screen.blit(self.close, self.get_rect_close())\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n\n\nclass NoMiniGame:\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n <function token>\n\n\n<assignment token>\n<code token>\n",
"<import token>\n<assignment token>\n<class token>\n<assignment token>\n<code token>\n"
] | false |
99,093 |
7ed647e992d42f22537f174683960237243a66f3
|
####################################
# Main File
####################################
from gui import run
####################################
## run without preloaded data
run.runApp()
## run with preloaded data
# run.runApp(preloadedData=True)
|
[
"####################################\n# Main File\n####################################\n\nfrom gui import run\n\n####################################\n\n## run without preloaded data\nrun.runApp()\n\n## run with preloaded data\n# run.runApp(preloadedData=True)",
"from gui import run\nrun.runApp()\n",
"<import token>\nrun.runApp()\n",
"<import token>\n<code token>\n"
] | false |
99,094 |
98191c0a159315e68acbd1917d6016684aa4fcfd
|
# Generated by Django 2.1.2 on 2018-10-30 12:19
import datetime
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('crypto_track', '0010_auto_20181029_1557'),
]
operations = [
migrations.AddField(
model_name='cryptocandle',
name='trend_date',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crypto_track.PyTrends'),
),
migrations.AlterField(
model_name='cryptocandle',
name='update_timestamp',
field=models.DateTimeField(default=datetime.datetime(2018, 10, 30, 12, 19, 57, 587809, tzinfo=utc)),
),
]
|
[
"# Generated by Django 2.1.2 on 2018-10-30 12:19\n\nimport datetime\nfrom django.db import migrations, models\nimport django.db.models.deletion\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('crypto_track', '0010_auto_20181029_1557'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='cryptocandle',\n name='trend_date',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crypto_track.PyTrends'),\n ),\n migrations.AlterField(\n model_name='cryptocandle',\n name='update_timestamp',\n field=models.DateTimeField(default=datetime.datetime(2018, 10, 30, 12, 19, 57, 587809, tzinfo=utc)),\n ),\n ]\n",
"import datetime\nfrom django.db import migrations, models\nimport django.db.models.deletion\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n dependencies = [('crypto_track', '0010_auto_20181029_1557')]\n operations = [migrations.AddField(model_name='cryptocandle', name=\n 'trend_date', field=models.ForeignKey(null=True, on_delete=django.\n db.models.deletion.SET_NULL, to='crypto_track.PyTrends')),\n migrations.AlterField(model_name='cryptocandle', name=\n 'update_timestamp', field=models.DateTimeField(default=datetime.\n datetime(2018, 10, 30, 12, 19, 57, 587809, tzinfo=utc)))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('crypto_track', '0010_auto_20181029_1557')]\n operations = [migrations.AddField(model_name='cryptocandle', name=\n 'trend_date', field=models.ForeignKey(null=True, on_delete=django.\n db.models.deletion.SET_NULL, to='crypto_track.PyTrends')),\n migrations.AlterField(model_name='cryptocandle', name=\n 'update_timestamp', field=models.DateTimeField(default=datetime.\n datetime(2018, 10, 30, 12, 19, 57, 587809, tzinfo=utc)))]\n",
"<import token>\n\n\nclass Migration(migrations.Migration):\n <assignment token>\n <assignment token>\n",
"<import token>\n<class token>\n"
] | false |
99,095 |
5a4e6285b8d4abf2609ad8d927e72fdbeb6be096
|
# -*- coding: utf-8 -*-
from os import listdir
from os.path import isfile, join
from nltk import FreqDist
from GoH import utilities
from GoH import reports
from GoH import charts
import pandas as pd
import numpy as np
import operator
from bokeh.plotting import figure, output_file, output_notebook, save, show
import re
from collections import defaultdict
def identify_errors(tokens, dictionary):
"""Compare words in documents to words in dictionary.
Args:
tokens (list): List of all tokens in the document.
dictionary (set): The set of approved words.
Returns:
set : Returns the set of tokens in the documents that are not
also dictionary words.
"""
return set(tokens).difference(dictionary)
def get_error_stats(errors, tokens):
""" Returns a dictionary recording each error and its
frequency in the document.
Uses the FreqDist function from NLTK.
Args:
errors (set): Set of errors identified in `identify_errors`.
tokens (list): Tokenized content of the file being evaluated.
"""
freq_distribution = FreqDist(tokens)
error_report = {}
for error in list(errors):
error_count = freq_distribution[error]
error_report.update({error:error_count})
return error_report
def total_errors(error_report):
""" Calculates the total errors recorded in the document.
Args:
error_report (dict): Dictionary of errors and counts generated
using `get_error_stats` function.
"""
return(sum(error_report.values()))
def error_rate(error_total, tokens):
""" Calculates the error rate of the document to 3 decimal places.
Arguments:
error_total -- Integer. Calculated using the `total_errors`
function from the dictionary of errors and their counts.
tokens -- List of tokens that compose the text
"""
if len(tokens) > 0:
return(float("{0:.3f}".format(error_total/len(tokens))))
else:
return(np.nan)
def generate_doc_report(text, spelling_dictionary):
"""
Creates a report (dictionary) on each document that includes:
- number of tokens (num_tokens)
- number of unique tokens (num_unique_tokens)
- number of errors (num_errors)
- error rate for the document (error_rate)
- dictionary of the errors and their counts (errors)
Uses a number of functions, including:
- `GoH.utilities.strip_punct`
- `GoH.utilities.tokenize_text`
- `GoH.utilities.to_lower`
- `GoH.utilities.identify_errors`
- `GoH.reports.get_error_stats`
- `GoH.reports.total_errors`
- `GoH.reports.error_rate`
Arguments:
- text -- the content of the file being evaluated
- spelling_dictionary -- a set containing the collection of verified words.
"""
text = utilities.strip_punct(text)
tokens = utilities.tokenize_text(text)
tokens = utilities.to_lower(tokens)
errors = identify_errors(tokens, spelling_dictionary)
error_report = get_error_stats(errors, tokens)
error_total = total_errors(error_report)
rate = error_rate(error_total, tokens)
return {'num_tokens': len(tokens),
'num_unique_tokens': len(set(tokens)),
'num_errors': error_total,
'error_rate': rate,
'errors': error_report}
def process_directory(directory, spelling_dictionary):
"""
Composit function for processing an entire directory of files.
Returns the statistics on the whole directory as a list of dictionaries.
Uses the following functions:
- `GoH.utilities.readfile`
- `GoH.reports.generate_doc_report`
Arguments:
- directory -- the location of the directory of files to evaluate.
- spelling_dictionary -- the set containing all verified words against which
the document is evaluated.
"""
corpus = (f for f in listdir(directory) if not f.startswith('.') and isfile(join(directory, f)))
statistics = []
for document in corpus:
content = utilities.readfile(directory, document)
stats = generate_doc_report(content, spelling_dictionary)
stats.update({"doc_id": document})
statistics.append(stats)
return(statistics)
def get_errors_summary(statistics):
"""
Get statistics on the errors for the whole directory.
Creates a dictionary (errors_summary) from all the reported errors/frequencies
that records the error (as key) and the total count for that error (as value).
Developed using: http://stackoverflow.com/questions/11011756,
http://stackoverflow.com/questions/27801945/
"""
all_errors = (report['errors'] for report in statistics)
errors_summary = defaultdict(int)
for doc in all_errors:
for key, value in doc.items():
errors_summary[key] += value
return errors_summary
def top_errors(errors_summary, min_count):
"""
Use the errors_summary to report the top errors.
"""
# Subset errors_summary using the min_count
frequent_errors = {key: value for key, value in errors_summary.items() if value > min_count}
# return sorted list of all errors with a count higher than the min_count
return sorted(frequent_errors.items(), key=operator.itemgetter(1), reverse=True)
def long_errors(errors_summary, min_length=10):
"""
Use the error_summary to isolate tokens that are longer thatn the min_length.
Used to identify strings of words that have been run together due to the failure
of the OCR engine to recognize whitespace.
Arguments:
- errors_summary --
"""
errors = list(errors_summary.keys())
return ([x for x in errors if len(x) > min_length], min_length)
def tokens_with_special_characters(errors_summary):
errors = list(errors_summary.keys())
special_characters = []
for error in errors:
if re.search("[^a-z0-9-']", error):
special_characters.append(error)
else:
pass
sc_dict = dict(map(lambda key: (key, errors_summary.get(key, None)), special_characters))
return sorted(sc_dict.items(), key=operator.itemgetter(1), reverse=True)
def docs_with_high_error_rate(corpus_statistics, min_error_rate=.2):
# Gather list of doc_id and num_errors
docs_2_errors = {}
for report in corpus_statistics:
docs_2_errors.update({report['doc_id']: report['error_rate']})
# Subset dictionary to get only records with error_rate above minimum
problem_docs = {key: value for key, value in docs_2_errors.items() if value > min_error_rate}
# return dictionary with doc_id and error_count if error rate higher than min_error_rate
return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse=True)
def docs_with_low_token_count(corpus_statistics, max_token_count=350):
# Gather list of doc_ids and total token count
docs_2_tokens = {}
for report in corpus_statistics:
docs_2_tokens.update({report['doc_id']: report['num_tokens']})
# Subset dictionary to get only records wth value below the max
short_docs = {key: value for key, value in docs_2_tokens.items() if value < max_token_count}
# return dictionary with doc_id and token_count if count is lower than max_token_count
return (short_docs, max_token_count)
def token_count(df):
return df['num_tokens'].sum()
def average_verified_rate(df):
""" To compute average error rate, add up the total number of tokens
and the total number of errors """
total_tokens = token_count(df)
total_errors = df['num_errors'].sum()
if total_tokens > 0:
return (total_tokens - total_errors)/total_tokens
else:
return np.nan
def average_error_rate(df):
error_sum = df['error_rate'].sum()
total_docs = len(df.index)
return error_sum/total_docs
def overview_report(directory, spelling_dictionary, title):
corpus_statistics = process_directory(directory, spelling_dictionary)
df = utilities.stats_to_df(corpus_statistics)
print("Directory: {}\n".format(directory))
print("Average verified rate: {}\n".format(average_verified_rate(df)))
print("Average of error rates: {}\n".format(average_error_rate(df)))
print("Total token count: {}\n".format(token_count(df)))
charts.chart_error_rate_distribution(df, title)
# chart_error_rate_per_doc( df, title )
return corpus_statistics
def overview_statistics(directory, spelling_dictionary, title):
"""
"""
corpus_statistics = process_directory(directory, spelling_dictionary)
return utilities.stats_to_df(corpus_statistics)
|
[
"# -*- coding: utf-8 -*-\n\nfrom os import listdir\nfrom os.path import isfile, join\nfrom nltk import FreqDist\nfrom GoH import utilities\nfrom GoH import reports\nfrom GoH import charts\nimport pandas as pd\nimport numpy as np\nimport operator\nfrom bokeh.plotting import figure, output_file, output_notebook, save, show\nimport re\nfrom collections import defaultdict\n\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens) \n \n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error:error_count})\n \n return error_report \n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return(sum(error_report.values()))\n\n\ndef error_rate(error_total, tokens):\n \"\"\" Calculates the error rate of the document to 3 decimal places.\n\n Arguments:\n error_total -- Integer. Calculated using the `total_errors` \n function from the dictionary of errors and their counts.\n tokens -- List of tokens that compose the text\n \"\"\"\n if len(tokens) > 0:\n return(float(\"{0:.3f}\".format(error_total/len(tokens))))\n else:\n return(np.nan)\n\n \ndef generate_doc_report(text, spelling_dictionary):\n \"\"\" \n Creates a report (dictionary) on each document that includes:\n - number of tokens (num_tokens)\n - number of unique tokens (num_unique_tokens)\n - number of errors (num_errors)\n - error rate for the document (error_rate)\n - dictionary of the errors and their counts (errors)\n\n Uses a number of functions, including:\n - `GoH.utilities.strip_punct`\n - `GoH.utilities.tokenize_text`\n - `GoH.utilities.to_lower`\n - `GoH.utilities.identify_errors`\n - `GoH.reports.get_error_stats`\n - `GoH.reports.total_errors`\n - `GoH.reports.error_rate`\n\n Arguments:\n - text -- the content of the file being evaluated\n - spelling_dictionary -- a set containing the collection of verified words.\n \"\"\"\n text = utilities.strip_punct(text)\n tokens = utilities.tokenize_text(text)\n tokens = utilities.to_lower(tokens)\n errors = identify_errors(tokens, spelling_dictionary)\n error_report = get_error_stats(errors, tokens)\n error_total = total_errors(error_report)\n rate = error_rate(error_total, tokens)\n return {'num_tokens': len(tokens),\n 'num_unique_tokens': len(set(tokens)),\n 'num_errors': error_total,\n 'error_rate': rate,\n 'errors': error_report}\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and isfile(join(directory, f)))\n \n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({\"doc_id\": document})\n statistics.append(stats)\n \n return(statistics) \n\n\ndef get_errors_summary(statistics):\n \"\"\"\n Get statistics on the errors for the whole directory.\n Creates a dictionary (errors_summary) from all the reported errors/frequencies\n that records the error (as key) and the total count for that error (as value).\n Developed using: http://stackoverflow.com/questions/11011756, \n http://stackoverflow.com/questions/27801945/\n \"\"\"\n all_errors = (report['errors'] for report in statistics) \n \n errors_summary = defaultdict(int)\n for doc in all_errors:\n for key, value in doc.items():\n errors_summary[key] += value\n\n return errors_summary\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n\n # Subset errors_summary using the min_count\n frequent_errors = {key: value for key, value in errors_summary.items() if value > min_count}\n\n # return sorted list of all errors with a count higher than the min_count\n return sorted(frequent_errors.items(), key=operator.itemgetter(1), reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n\n return ([x for x in errors if len(x) > min_length], min_length)\n\n\ndef tokens_with_special_characters(errors_summary):\n errors = list(errors_summary.keys())\n\n special_characters = []\n for error in errors:\n if re.search(\"[^a-z0-9-']\", error):\n special_characters.append(error)\n else:\n pass\n\n sc_dict = dict(map(lambda key: (key, errors_summary.get(key, None)), special_characters))\n\n return sorted(sc_dict.items(), key=operator.itemgetter(1), reverse=True)\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=.2):\n # Gather list of doc_id and num_errors\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n\n # Subset dictionary to get only records with error_rate above minimum\n problem_docs = {key: value for key, value in docs_2_errors.items() if value > min_error_rate}\n\n # return dictionary with doc_id and error_count if error rate higher than min_error_rate\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse=True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n # Gather list of doc_ids and total token count\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n\n # Subset dictionary to get only records wth value below the max\n short_docs = {key: value for key, value in docs_2_tokens.items() if value < max_token_count}\n\n # return dictionary with doc_id and token_count if count is lower than max_token_count\n return (short_docs, max_token_count)\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n\n if total_tokens > 0:\n return (total_tokens - total_errors)/total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n\n return error_sum/total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n\n df = utilities.stats_to_df(corpus_statistics)\n\n print(\"Directory: {}\\n\".format(directory))\n print(\"Average verified rate: {}\\n\".format(average_verified_rate(df)))\n print(\"Average of error rates: {}\\n\".format(average_error_rate(df)))\n print(\"Total token count: {}\\n\".format(token_count(df)))\n\n charts.chart_error_rate_distribution(df, title)\n # chart_error_rate_per_doc( df, title )\n\n return corpus_statistics\n\ndef overview_statistics(directory, spelling_dictionary, title):\n \"\"\"\n \"\"\"\n corpus_statistics = process_directory(directory, spelling_dictionary)\n\n return utilities.stats_to_df(corpus_statistics)",
"from os import listdir\nfrom os.path import isfile, join\nfrom nltk import FreqDist\nfrom GoH import utilities\nfrom GoH import reports\nfrom GoH import charts\nimport pandas as pd\nimport numpy as np\nimport operator\nfrom bokeh.plotting import figure, output_file, output_notebook, save, show\nimport re\nfrom collections import defaultdict\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\ndef error_rate(error_total, tokens):\n \"\"\" Calculates the error rate of the document to 3 decimal places.\n\n Arguments:\n error_total -- Integer. Calculated using the `total_errors` \n function from the dictionary of errors and their counts.\n tokens -- List of tokens that compose the text\n \"\"\"\n if len(tokens) > 0:\n return float('{0:.3f}'.format(error_total / len(tokens)))\n else:\n return np.nan\n\n\ndef generate_doc_report(text, spelling_dictionary):\n \"\"\" \n Creates a report (dictionary) on each document that includes:\n - number of tokens (num_tokens)\n - number of unique tokens (num_unique_tokens)\n - number of errors (num_errors)\n - error rate for the document (error_rate)\n - dictionary of the errors and their counts (errors)\n\n Uses a number of functions, including:\n - `GoH.utilities.strip_punct`\n - `GoH.utilities.tokenize_text`\n - `GoH.utilities.to_lower`\n - `GoH.utilities.identify_errors`\n - `GoH.reports.get_error_stats`\n - `GoH.reports.total_errors`\n - `GoH.reports.error_rate`\n\n Arguments:\n - text -- the content of the file being evaluated\n - spelling_dictionary -- a set containing the collection of verified words.\n \"\"\"\n text = utilities.strip_punct(text)\n tokens = utilities.tokenize_text(text)\n tokens = utilities.to_lower(tokens)\n errors = identify_errors(tokens, spelling_dictionary)\n error_report = get_error_stats(errors, tokens)\n error_total = total_errors(error_report)\n rate = error_rate(error_total, tokens)\n return {'num_tokens': len(tokens), 'num_unique_tokens': len(set(tokens)\n ), 'num_errors': error_total, 'error_rate': rate, 'errors':\n error_report}\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\ndef get_errors_summary(statistics):\n \"\"\"\n Get statistics on the errors for the whole directory.\n Creates a dictionary (errors_summary) from all the reported errors/frequencies\n that records the error (as key) and the total count for that error (as value).\n Developed using: http://stackoverflow.com/questions/11011756, \n http://stackoverflow.com/questions/27801945/\n \"\"\"\n all_errors = (report['errors'] for report in statistics)\n errors_summary = defaultdict(int)\n for doc in all_errors:\n for key, value in doc.items():\n errors_summary[key] += value\n return errors_summary\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\ndef tokens_with_special_characters(errors_summary):\n errors = list(errors_summary.keys())\n special_characters = []\n for error in errors:\n if re.search(\"[^a-z0-9-']\", error):\n special_characters.append(error)\n else:\n pass\n sc_dict = dict(map(lambda key: (key, errors_summary.get(key, None)),\n special_characters))\n return sorted(sc_dict.items(), key=operator.itemgetter(1), reverse=True)\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n short_docs = {key: value for key, value in docs_2_tokens.items() if \n value < max_token_count}\n return short_docs, max_token_count\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\ndef overview_statistics(directory, spelling_dictionary, title):\n \"\"\"\n \"\"\"\n corpus_statistics = process_directory(directory, spelling_dictionary)\n return utilities.stats_to_df(corpus_statistics)\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\ndef error_rate(error_total, tokens):\n \"\"\" Calculates the error rate of the document to 3 decimal places.\n\n Arguments:\n error_total -- Integer. Calculated using the `total_errors` \n function from the dictionary of errors and their counts.\n tokens -- List of tokens that compose the text\n \"\"\"\n if len(tokens) > 0:\n return float('{0:.3f}'.format(error_total / len(tokens)))\n else:\n return np.nan\n\n\ndef generate_doc_report(text, spelling_dictionary):\n \"\"\" \n Creates a report (dictionary) on each document that includes:\n - number of tokens (num_tokens)\n - number of unique tokens (num_unique_tokens)\n - number of errors (num_errors)\n - error rate for the document (error_rate)\n - dictionary of the errors and their counts (errors)\n\n Uses a number of functions, including:\n - `GoH.utilities.strip_punct`\n - `GoH.utilities.tokenize_text`\n - `GoH.utilities.to_lower`\n - `GoH.utilities.identify_errors`\n - `GoH.reports.get_error_stats`\n - `GoH.reports.total_errors`\n - `GoH.reports.error_rate`\n\n Arguments:\n - text -- the content of the file being evaluated\n - spelling_dictionary -- a set containing the collection of verified words.\n \"\"\"\n text = utilities.strip_punct(text)\n tokens = utilities.tokenize_text(text)\n tokens = utilities.to_lower(tokens)\n errors = identify_errors(tokens, spelling_dictionary)\n error_report = get_error_stats(errors, tokens)\n error_total = total_errors(error_report)\n rate = error_rate(error_total, tokens)\n return {'num_tokens': len(tokens), 'num_unique_tokens': len(set(tokens)\n ), 'num_errors': error_total, 'error_rate': rate, 'errors':\n error_report}\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\ndef get_errors_summary(statistics):\n \"\"\"\n Get statistics on the errors for the whole directory.\n Creates a dictionary (errors_summary) from all the reported errors/frequencies\n that records the error (as key) and the total count for that error (as value).\n Developed using: http://stackoverflow.com/questions/11011756, \n http://stackoverflow.com/questions/27801945/\n \"\"\"\n all_errors = (report['errors'] for report in statistics)\n errors_summary = defaultdict(int)\n for doc in all_errors:\n for key, value in doc.items():\n errors_summary[key] += value\n return errors_summary\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\ndef tokens_with_special_characters(errors_summary):\n errors = list(errors_summary.keys())\n special_characters = []\n for error in errors:\n if re.search(\"[^a-z0-9-']\", error):\n special_characters.append(error)\n else:\n pass\n sc_dict = dict(map(lambda key: (key, errors_summary.get(key, None)),\n special_characters))\n return sorted(sc_dict.items(), key=operator.itemgetter(1), reverse=True)\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n short_docs = {key: value for key, value in docs_2_tokens.items() if \n value < max_token_count}\n return short_docs, max_token_count\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\ndef overview_statistics(directory, spelling_dictionary, title):\n \"\"\"\n \"\"\"\n corpus_statistics = process_directory(directory, spelling_dictionary)\n return utilities.stats_to_df(corpus_statistics)\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n\n\ndef generate_doc_report(text, spelling_dictionary):\n \"\"\" \n Creates a report (dictionary) on each document that includes:\n - number of tokens (num_tokens)\n - number of unique tokens (num_unique_tokens)\n - number of errors (num_errors)\n - error rate for the document (error_rate)\n - dictionary of the errors and their counts (errors)\n\n Uses a number of functions, including:\n - `GoH.utilities.strip_punct`\n - `GoH.utilities.tokenize_text`\n - `GoH.utilities.to_lower`\n - `GoH.utilities.identify_errors`\n - `GoH.reports.get_error_stats`\n - `GoH.reports.total_errors`\n - `GoH.reports.error_rate`\n\n Arguments:\n - text -- the content of the file being evaluated\n - spelling_dictionary -- a set containing the collection of verified words.\n \"\"\"\n text = utilities.strip_punct(text)\n tokens = utilities.tokenize_text(text)\n tokens = utilities.to_lower(tokens)\n errors = identify_errors(tokens, spelling_dictionary)\n error_report = get_error_stats(errors, tokens)\n error_total = total_errors(error_report)\n rate = error_rate(error_total, tokens)\n return {'num_tokens': len(tokens), 'num_unique_tokens': len(set(tokens)\n ), 'num_errors': error_total, 'error_rate': rate, 'errors':\n error_report}\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\ndef get_errors_summary(statistics):\n \"\"\"\n Get statistics on the errors for the whole directory.\n Creates a dictionary (errors_summary) from all the reported errors/frequencies\n that records the error (as key) and the total count for that error (as value).\n Developed using: http://stackoverflow.com/questions/11011756, \n http://stackoverflow.com/questions/27801945/\n \"\"\"\n all_errors = (report['errors'] for report in statistics)\n errors_summary = defaultdict(int)\n for doc in all_errors:\n for key, value in doc.items():\n errors_summary[key] += value\n return errors_summary\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\ndef tokens_with_special_characters(errors_summary):\n errors = list(errors_summary.keys())\n special_characters = []\n for error in errors:\n if re.search(\"[^a-z0-9-']\", error):\n special_characters.append(error)\n else:\n pass\n sc_dict = dict(map(lambda key: (key, errors_summary.get(key, None)),\n special_characters))\n return sorted(sc_dict.items(), key=operator.itemgetter(1), reverse=True)\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n short_docs = {key: value for key, value in docs_2_tokens.items() if \n value < max_token_count}\n return short_docs, max_token_count\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\ndef overview_statistics(directory, spelling_dictionary, title):\n \"\"\"\n \"\"\"\n corpus_statistics = process_directory(directory, spelling_dictionary)\n return utilities.stats_to_df(corpus_statistics)\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n\n\ndef generate_doc_report(text, spelling_dictionary):\n \"\"\" \n Creates a report (dictionary) on each document that includes:\n - number of tokens (num_tokens)\n - number of unique tokens (num_unique_tokens)\n - number of errors (num_errors)\n - error rate for the document (error_rate)\n - dictionary of the errors and their counts (errors)\n\n Uses a number of functions, including:\n - `GoH.utilities.strip_punct`\n - `GoH.utilities.tokenize_text`\n - `GoH.utilities.to_lower`\n - `GoH.utilities.identify_errors`\n - `GoH.reports.get_error_stats`\n - `GoH.reports.total_errors`\n - `GoH.reports.error_rate`\n\n Arguments:\n - text -- the content of the file being evaluated\n - spelling_dictionary -- a set containing the collection of verified words.\n \"\"\"\n text = utilities.strip_punct(text)\n tokens = utilities.tokenize_text(text)\n tokens = utilities.to_lower(tokens)\n errors = identify_errors(tokens, spelling_dictionary)\n error_report = get_error_stats(errors, tokens)\n error_total = total_errors(error_report)\n rate = error_rate(error_total, tokens)\n return {'num_tokens': len(tokens), 'num_unique_tokens': len(set(tokens)\n ), 'num_errors': error_total, 'error_rate': rate, 'errors':\n error_report}\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\ndef get_errors_summary(statistics):\n \"\"\"\n Get statistics on the errors for the whole directory.\n Creates a dictionary (errors_summary) from all the reported errors/frequencies\n that records the error (as key) and the total count for that error (as value).\n Developed using: http://stackoverflow.com/questions/11011756, \n http://stackoverflow.com/questions/27801945/\n \"\"\"\n all_errors = (report['errors'] for report in statistics)\n errors_summary = defaultdict(int)\n for doc in all_errors:\n for key, value in doc.items():\n errors_summary[key] += value\n return errors_summary\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n short_docs = {key: value for key, value in docs_2_tokens.items() if \n value < max_token_count}\n return short_docs, max_token_count\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\ndef overview_statistics(directory, spelling_dictionary, title):\n \"\"\"\n \"\"\"\n corpus_statistics = process_directory(directory, spelling_dictionary)\n return utilities.stats_to_df(corpus_statistics)\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n\n\ndef generate_doc_report(text, spelling_dictionary):\n \"\"\" \n Creates a report (dictionary) on each document that includes:\n - number of tokens (num_tokens)\n - number of unique tokens (num_unique_tokens)\n - number of errors (num_errors)\n - error rate for the document (error_rate)\n - dictionary of the errors and their counts (errors)\n\n Uses a number of functions, including:\n - `GoH.utilities.strip_punct`\n - `GoH.utilities.tokenize_text`\n - `GoH.utilities.to_lower`\n - `GoH.utilities.identify_errors`\n - `GoH.reports.get_error_stats`\n - `GoH.reports.total_errors`\n - `GoH.reports.error_rate`\n\n Arguments:\n - text -- the content of the file being evaluated\n - spelling_dictionary -- a set containing the collection of verified words.\n \"\"\"\n text = utilities.strip_punct(text)\n tokens = utilities.tokenize_text(text)\n tokens = utilities.to_lower(tokens)\n errors = identify_errors(tokens, spelling_dictionary)\n error_report = get_error_stats(errors, tokens)\n error_total = total_errors(error_report)\n rate = error_rate(error_total, tokens)\n return {'num_tokens': len(tokens), 'num_unique_tokens': len(set(tokens)\n ), 'num_errors': error_total, 'error_rate': rate, 'errors':\n error_report}\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\ndef get_errors_summary(statistics):\n \"\"\"\n Get statistics on the errors for the whole directory.\n Creates a dictionary (errors_summary) from all the reported errors/frequencies\n that records the error (as key) and the total count for that error (as value).\n Developed using: http://stackoverflow.com/questions/11011756, \n http://stackoverflow.com/questions/27801945/\n \"\"\"\n all_errors = (report['errors'] for report in statistics)\n errors_summary = defaultdict(int)\n for doc in all_errors:\n for key, value in doc.items():\n errors_summary[key] += value\n return errors_summary\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n short_docs = {key: value for key, value in docs_2_tokens.items() if \n value < max_token_count}\n return short_docs, max_token_count\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\ndef get_errors_summary(statistics):\n \"\"\"\n Get statistics on the errors for the whole directory.\n Creates a dictionary (errors_summary) from all the reported errors/frequencies\n that records the error (as key) and the total count for that error (as value).\n Developed using: http://stackoverflow.com/questions/11011756, \n http://stackoverflow.com/questions/27801945/\n \"\"\"\n all_errors = (report['errors'] for report in statistics)\n errors_summary = defaultdict(int)\n for doc in all_errors:\n for key, value in doc.items():\n errors_summary[key] += value\n return errors_summary\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n short_docs = {key: value for key, value in docs_2_tokens.items() if \n value < max_token_count}\n return short_docs, max_token_count\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\ndef docs_with_low_token_count(corpus_statistics, max_token_count=350):\n docs_2_tokens = {}\n for report in corpus_statistics:\n docs_2_tokens.update({report['doc_id']: report['num_tokens']})\n short_docs = {key: value for key, value in docs_2_tokens.items() if \n value < max_token_count}\n return short_docs, max_token_count\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\ndef average_verified_rate(df):\n \"\"\" To compute average error rate, add up the total number of tokens\n and the total number of errors \"\"\"\n total_tokens = token_count(df)\n total_errors = df['num_errors'].sum()\n if total_tokens > 0:\n return (total_tokens - total_errors) / total_tokens\n else:\n return np.nan\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\ndef long_errors(errors_summary, min_length=10):\n \"\"\"\n Use the error_summary to isolate tokens that are longer thatn the min_length. \n Used to identify strings of words that have been run together due to the failure\n of the OCR engine to recognize whitespace.\n\n Arguments:\n - errors_summary -- \n \"\"\"\n errors = list(errors_summary.keys())\n return [x for x in errors if len(x) > min_length], min_length\n\n\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\n<function token>\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\ndef total_errors(error_report):\n \"\"\" Calculates the total errors recorded in the document.\n\n Args:\n error_report (dict): Dictionary of errors and counts generated\n using `get_error_stats` function.\n \"\"\"\n return sum(error_report.values())\n\n\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\n<function token>\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n\n\ndef top_errors(errors_summary, min_count):\n \"\"\" \n Use the errors_summary to report the top errors.\n \"\"\"\n frequent_errors = {key: value for key, value in errors_summary.items() if\n value > min_count}\n return sorted(frequent_errors.items(), key=operator.itemgetter(1),\n reverse=True)\n\n\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\n<function token>\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\n<function token>\n\n\ndef average_error_rate(df):\n error_sum = df['error_rate'].sum()\n total_docs = len(df.index)\n return error_sum / total_docs\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\ndef get_error_stats(errors, tokens):\n \"\"\" Returns a dictionary recording each error and its \n frequency in the document.\n\n Uses the FreqDist function from NLTK.\n\n Args:\n errors (set): Set of errors identified in `identify_errors`.\n tokens (list): Tokenized content of the file being evaluated.\n \"\"\"\n freq_distribution = FreqDist(tokens)\n error_report = {}\n for error in list(errors):\n error_count = freq_distribution[error]\n error_report.update({error: error_count})\n return error_report\n\n\n<function token>\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\n<function token>\n<function token>\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\n<function token>\n<function token>\n\n\ndef overview_report(directory, spelling_dictionary, title):\n corpus_statistics = process_directory(directory, spelling_dictionary)\n df = utilities.stats_to_df(corpus_statistics)\n print('Directory: {}\\n'.format(directory))\n print('Average verified rate: {}\\n'.format(average_verified_rate(df)))\n print('Average of error rates: {}\\n'.format(average_error_rate(df)))\n print('Total token count: {}\\n'.format(token_count(df)))\n charts.chart_error_rate_distribution(df, title)\n return corpus_statistics\n\n\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n\n\ndef token_count(df):\n return df['num_tokens'].sum()\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef process_directory(directory, spelling_dictionary):\n \"\"\" \n Composit function for processing an entire directory of files.\n Returns the statistics on the whole directory as a list of dictionaries.\n\n Uses the following functions:\n - `GoH.utilities.readfile`\n - `GoH.reports.generate_doc_report`\n\n Arguments:\n - directory -- the location of the directory of files to evaluate.\n - spelling_dictionary -- the set containing all verified words against which\n the document is evaluated.\n \"\"\"\n corpus = (f for f in listdir(directory) if not f.startswith('.') and\n isfile(join(directory, f)))\n statistics = []\n for document in corpus:\n content = utilities.readfile(directory, document)\n stats = generate_doc_report(content, spelling_dictionary)\n stats.update({'doc_id': document})\n statistics.append(stats)\n return statistics\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n\n\ndef docs_with_high_error_rate(corpus_statistics, min_error_rate=0.2):\n docs_2_errors = {}\n for report in corpus_statistics:\n docs_2_errors.update({report['doc_id']: report['error_rate']})\n problem_docs = {key: value for key, value in docs_2_errors.items() if \n value > min_error_rate}\n return sorted(problem_docs.items(), key=operator.itemgetter(1), reverse\n =True)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n\n\ndef identify_errors(tokens, dictionary):\n \"\"\"Compare words in documents to words in dictionary. \n\n Args:\n tokens (list): List of all tokens in the document.\n dictionary (set): The set of approved words.\n Returns:\n set : Returns the set of tokens in the documents that are not \n also dictionary words.\n \"\"\"\n return set(tokens).difference(dictionary)\n\n\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n",
"<import token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n"
] | false |
99,096 |
9e4b6301e5251a11bb433e981d753ad1080f37e9
|
import webbrowser
import os
os.system('clear')
print('powered by: stephen')
print('+fps')
nerdola = input('iniciar contador e aumentador de fps? S/N ')
if (nerdola == 'S' ):
while True:
webbrowser.open('https://www.xvideos.com/')
webbrowser.open('https://youtu.be/dQw4w9WgXcQ')
else:
print('tabom....')
|
[
"import webbrowser\nimport os\n\nos.system('clear')\n\nprint('powered by: stephen')\n\nprint('+fps')\n\nnerdola = input('iniciar contador e aumentador de fps? S/N ')\n\nif (nerdola == 'S' ):\n\n while True:\n webbrowser.open('https://www.xvideos.com/')\n\n webbrowser.open('https://youtu.be/dQw4w9WgXcQ')\n\nelse: \n print('tabom....')",
"import webbrowser\nimport os\nos.system('clear')\nprint('powered by: stephen')\nprint('+fps')\nnerdola = input('iniciar contador e aumentador de fps? S/N ')\nif nerdola == 'S':\n while True:\n webbrowser.open('https://www.xvideos.com/')\n webbrowser.open('https://youtu.be/dQw4w9WgXcQ')\nelse:\n print('tabom....')\n",
"<import token>\nos.system('clear')\nprint('powered by: stephen')\nprint('+fps')\nnerdola = input('iniciar contador e aumentador de fps? S/N ')\nif nerdola == 'S':\n while True:\n webbrowser.open('https://www.xvideos.com/')\n webbrowser.open('https://youtu.be/dQw4w9WgXcQ')\nelse:\n print('tabom....')\n",
"<import token>\nos.system('clear')\nprint('powered by: stephen')\nprint('+fps')\n<assignment token>\nif nerdola == 'S':\n while True:\n webbrowser.open('https://www.xvideos.com/')\n webbrowser.open('https://youtu.be/dQw4w9WgXcQ')\nelse:\n print('tabom....')\n",
"<import token>\n<code token>\n<assignment token>\n<code token>\n"
] | false |
99,097 |
a0f70673a1b74f01c5ac52d3cff327b44c4049d2
|
# -*- coding: utf-8 -*-
"""
Created on September 2019
@authors: Osvaldo M Velarde - Damián Dellavale - Javier Velez
@title: Module - "comodulogram"
"""
import numpy as np
from sklearn.preprocessing import scale
from scipy.signal import hilbert
import filtering
def function_setCFCcfg(CFCin):
"""
Description:
In this function we compute the structures for the "x" and "y" axis of the comodulogram.
Inputs:
- CFCin: Structure. Parameters of the comodulogram.
- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].
- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].
- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].
- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].
- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].
- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].
- 'fXlookAt': String.
Parameter of the signal observed in the range of
frequency corresponding to the "x" axis.
- 'fYlookAt': String.
Parameter of the signal observed in the range of
frequency corresponding to the "y" axis.
- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.
- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.
- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's "x" axis.
- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's "y" axis.
- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.
The LPF Filter is used to smooth the frequency time series
(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').
- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.
0 - Return a NaN.
1 - Return the filtered signals.
- 'Nbins': Int value.
Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram.
- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:
0 - Do not truncate the signal "x" to obtain the same number of cycles.
1 - Process the same number of cycles of signal "x" for all "fX" frequencies.
- 'CFCmethod': String. {'plv','mi'}
Defines the approach to compute the Cross frequency Coupling
(PLV / methods to compute the MI).
- 'verbose': Boolean {0,1}.
0: no message are shown.
1: show the messages.
- 'perMethod': String. Method by which the surrogated time series are built. Options
* 'trialShuffling'
* 'sampleShuffling'
* 'FFTphaseShuffling'
* 'cutShuffling'
- 'Nper': Int value. Number of permutations.
It defines the number of surrogate histograms per
repetition. It is worth noting that in each repetition, "Nper" surrogate histograms of size
"Nbins x NfY x NfX" are stored in memory (RAM).
- 'Nrep': Int value. Number of repetitions.
In each repetition a ".mat" file is written to disk,
containing "Nper" surrogate histograms of size "Nbins x NfY x NfX".
As a consequence, the final number of surrogate histograms is "Nper x Nrep".
- 'Pvalue': Numeric value. P-value for the statistically significant level.
- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.
Method to correct for multiple comparisons.
- 'fs': Numeric value.
Outputs:
- CFCout: Structure. Parameters of the comodulogram.
-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in "x(y)" axis.
- 'start': Numeric value. Start frequency [Hz].
- 'end': Numeric value. End frequency [Hz].
- 'res': Numeric value. Frequency resolution [Hz].
Define the frequency separation between two consecutive BPFs.
- 'BPFcfg': Structure.
Band-Pass Filter configuration for the comodulogram's "x(y)" axis.
- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of
frequency corresponding to the "x(y)" axis [none] (string).
- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.
Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf
- 'LPFcfg' Structure.
Low-Pass Filter configuration to smooth the frequency time series (structure array).
The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'
or 'PHASEofFREQUENCY').
- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.
0: Return a NaN.
1: Return the filtered signals.
- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms
(p) of the comodulogram.
- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.
0: Do not truncate the signal "x" to obtain the same number of cycles.
1: Process the same number of cycles of signal "x" for all "fX" frequencies.
- 'CFCmethod'
- 'verbose'
- 'perMethod'
- 'Nper'
- 'Nrep'
- 'Pvalue'
- 'corrMultComp'
- 'fs'
"""
# Default values of the outputs --------------------------------------------------
fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin['fXres'],
'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],
'n': CFCin['nX'], 'Nbins': CFCin['Nbins'],
'sameNumberOfCycles': CFCin['sameNumberOfCycles'],
'saveBPFsignal': CFCin['saveBPFsignal']}
fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin['fYres'],
'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],
'n': CFCin['nY'],
'saveBPFsignal': CFCin['saveBPFsignal']}
if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'].lower == 'phaseoffrequency':
fYcfg['LPFcfg'] = CFCin['LPFcfg']
# --------------------------------------------------------------------------------
# Compute the start frequency for "x" axis taking into account the bandwidth of the band-pass filter.
if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw']/2:
fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw']/2
# Compute the vector of frequency for the "x" axis -------------------------------
fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'],fXcfg['end'],np.ceil((fXcfg['end']-fXcfg['start'])/fXcfg['res']))
#np.arange(fXcfg['start'],fXcfg['end']+fXcfg['res'],fXcfg['res'])
# Compute the adaptive number of BPFs connected in series ------------------------
if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times'])>1:
fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],fXcfg['BPFcfg']['times'][-1],len(fXcfg['BPFcfg']['times']))
# Compute the bandwidth for the BPFs in the "y" axis -----------------------------
if type(fYcfg['BPFcfg']['Bw']*1.0) == float: #Constant bandwidth
fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw']*np.ones(np.shape(fXcfg['BPFcfg']['f0']))
else: # Adaptive
fYcfg['BPFcfg']['Bw'] = 2*fXcfg['BPFcfg']['f0']
# Compute the start frequency for "y" axis taking into account the bandwidth of the band-pass filter.
if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0]/2:
fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0]/2
# Compute the vector of frequency for the "y" axis --------------------------------
fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'],fYcfg['end'],np.ceil((fYcfg['end']-fYcfg['start'])/fYcfg['res']))
#fYcfg['BPFcfg']['f0'] = np.arange(fYcfg['start'],fYcfg['end']+fYcfg['res'],fYcfg['res'])
# Compute the adaptive number of BPFs connected in series -------------------------
if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times'])>1:
fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],fYcfg['BPFcfg']['times'][-1],len(fYcfg['BPFcfg']['times']))
# Compute the output structure ----------------------------------------------------
CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg,
'CFCmethod': CFCin['CFCmethod'],
'verbose': CFCin['verbose'], 'perMethod': CFCin['perMethod'],
'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],
'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],
'fs': CFCin['fs']}
return CFCout
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
FILTERS_SWITCHER = {'function_FDF': filtering.function_FDF,
'function_eegfilt':filtering.function_eegfilt,
'function_butterBPF':filtering.function_butterBPF}
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
def function_comodulogramBPF(signal,BPFcfg,fs,indSettlingExt):
"""
Description:
In this function we implement the Band-Pass Filtering of the input signal.
The input signal is supposed to be a raw (unfiltered) time series.
Inputs:
- signal: Numeric array (Nsamples x 1). Data.
- BPFcfg: Structure.
Band-Pass Filter configuration for the comodulogram's "x(y)" axis.
- 'function': string {'function_butterBPF', 'function_eegfilt', 'function_FDF'}
It specifies the function for the Band-Pass Filter:
* 'function_butterBPF', a BPF IIR filter is implemented using a series connection of a
High-Pass followed by a Low-Pass Butterworth filters.
* 'function_eegfilt', a BPF FIR filter is implemented using the "eegfilt.m" function from
EEGLAB toolbox.
* 'function_FDF', a Frequency Domain Filtering is implemented using a window function.
- fs: Numeric value. Sampling rate [Hz].
- indSettlingExt: Int value. External index for cutting out the transient response of the BPFs.
If "indSettlingExt" is empty or NaN, the index for the longest settling time is used.
Outputs:
- indSettlingMax: Int value. Index corresponding to the longest transient response of the BPFs.
- BPFsignal: Numeric array (Nsamples x Nf x NBw). Band-Pass Filtered signals.
where: Ns = np.shape[signal,0]. Number of samples.
Nf = len(fcfg['BPFcfg']['f0']). Number of frequencies.
NBw = len(fcfg['BPFcfg']['Bw']). Number of Bandwidths.
"""
# Argument completion ------------------------------------------------------
# if (nargin < 4)||isempty(signal)...
# ||isempty(BPFcfg)...
# ||isempty(fs)...
# ||isempty(indSettlingExt),...
# error('MATLAB:function_comodulogramBPF','Input argument error.');
# end
if 'f1' in BPFcfg.keys() and 'f2' in BPFcfg.keys():
# Compute the cutoff frequencies.
BPFcfg['f0'] = (BPFcfg['f1'] + BPFcfg['f2']) / 2 # Arithmetic mean.
# BPFcfg['f0'] = np.sqrt(BPFcfg['f1'] * BPFcfg['f2']) %Geometric mean.
# %Ref: https://en.wikipedia.org/wiki/Center_frequency
BPFcfg['Bw'] = BPFcfg['f2'] - BPFcfg['f1']
#elseif ~isfield(BPFcfg, 'f0') || ~isfield(BPFcfg, 'Bw'),
# error('MATLAB:function_comodulogramBPF','Error in the BPF configuration (BPFcfg).');
# --------------------------------------------------------------------------
# Check the input arguments ------------------------------------------------
#assert(size(signal,2)==1, 'Input argument error in function "function_comodulogramBPF": The signal must be a column array.');
#assert(isstruct(BPFcfg), 'Input argument error in function "function_comodulogramBPF": BPFcfg must be a structure array.');
#assert(isnumeric(indSettlingExt)&&(indSettlingExt>0)&&(length(indSettlingExt)==1),...
# 'Input argument error in function "function_comodulogramBPFandFeature": The value for "indSettlingExt" is not valid.');
# --------------------------------------------------------------------------
# Default values of the outputs --------------------------------------------
Nf = np.size(BPFcfg['f0']) # Number of frequencies.
NBw = np.size(BPFcfg['Bw']) # Number of Bandwidths.
fnyq = fs/2 # [Hz] Nyquist frequency.
Ncycle = np.round(fs / np.atleast_1d(BPFcfg['f0'])[0]) # Compute the samples per period for the minimum frequency.
Ns = np.shape(signal)[0] # Compute the number of samples of the input signal.
Ns_cropped = Ns - 2*(indSettlingExt-1) # Compute the final length of the time series after clipping.
# if Ncycle >= Ns_cropped:
# error('MATLAB:function_comodulogramBPF',...
# 'The time series is too short: it does not include at least one period of the minimum frequency.')
# --------------------------------------------------------------------------
# Initializes the index corresponding to the maximum settling time with the external value.
indSettlingMax = indSettlingExt
# --------------------------------------------------------------------------
## Band-Pass Filtering -----------------------------------------------------
BPFsignal = np.zeros((Ns_cropped, Nf, NBw)) # Memory pre-allocation.
for ii in range(NBw): # Loop for Bandwidths.
BPFsignal_local = np.zeros((Ns, Nf)) # Memory pre-allocation.
indSettling = np.zeros((1, Nf)) # Memory pre-allocation.
for jj in range(Nf): # Loop for frequencies.
BPFcfg_local = BPFcfg # Extract the parameters for the BPF configuration.
BPFcfg_local['Bw'] = np.atleast_1d(BPFcfg['Bw'])[ii]
BPFcfg_local['f0'] = np.atleast_1d(BPFcfg['f0'])[jj]
# Do not compute the cases in which,
# 1) the lower cutoff frequency is lesser than or equal to zero.
# 2) the higher cutoff frequency is greater than or equal to one.
# Ref: Lega 2014 PAC in human hippocampus.pdf
if (BPFcfg_local['f0']-BPFcfg_local['Bw']/2)<=fs/Ns or (BPFcfg_local['f0']+BPFcfg_local['Bw']/2)/fnyq>=1:
continue
# -------------------------------------------------------------------
filter_function = FILTERS_SWITCHER.get(BPFcfg_local['function'], lambda: "Invalid method") # Switch for filter selection.
BPFsignal_localjj, indSettling[jj], _ , _ = filter_function(signal, BPFcfg_local, fs)
BPFsignal_local[:,jj] = np.real(np.squeeze(BPFsignal_localjj))
# VER: Para el caso butterBPF, se ejecutaba esto antes de filter_function
# case 'function_butterBPF', # Band-Pass Filter (IIR) using a series connection of a High-Pass followed by a Low-Pass Butterworth filters.
# if length(BPFcfg_local.times)>1, %Adaptive number of BPFs connected in series.
# BPFcfg_local.times = BPFcfg.times(jj);
# -----------------------------------------------------------------------
# Cut out the transient response of the BPFs -----------------------------
indSettlingMax = max([indSettling, indSettlingMax]) # Compute the index for the largest settling time.
if indSettlingMax > indSettlingExt: # Compare the internal and external settling time indices.
print('Un msj no implementado- function_comodulogramBPF_v1_225')
#warning('MATLAB:function_comodulogramBPF',...
# 'The transient response have not completely removed using "indSettlingExt":');
#display(['Relative difference = ' num2str(100*(indSettlingExt-indSettlingMax)/indSettlingMax) '%.']);
BPFsignal_local = BPFsignal_local[indSettlingExt-1:BPFsignal_local.shape[0]-(indSettlingExt-1),:] # Cutting out the BPFs' transient response.
# -----------------------------------------------------------------------
BPFsignal[:,:,ii] = BPFsignal_local
#This is required in the case of a single Bandwidth.
#if NBw==1:
# BPFsignal = np.squeeze(BPFsignal)
return BPFsignal, indSettlingMax
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
def function_feature_phase(signal):
"""
Description:
Compute the phase of the z-scored BPF signal.
Remark:
Before the computation of the phase signal, the time series should be
normalized, de-trended, or mean-subtracted to have the DC-component removed.
this ensures that phase values are not limited in range.
Ref:
Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf
Angle [rad] in (-pi,pi]
"""
return np.angle(hilbert(scale(signal),axis=0))
def function_feature_amplitude(signal):
"""
Description:
Compute the amplitude (signal envelope).
Amplitude envelope of the signal (AM demodulation).
"""
return np.abs(hilbert(signal,axis=0))
def function_feature_phofamp(signal):
"""
Description:
Phase of the signal's amplitude envelope.
Remark:
Before the computation of the phase signal, the time series should be
normalized, de-trended, or mean-subtracted to have the DC-component removed;
this ensures that phase values are not limited in range.
Ref: Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf
"""
BPFfeature = np.abs(hilbert(signal,axis=0)) # Compute the amplitudes (signal envelope).
BPFfeature = scale(BPFfeature) # Normalization in order to avoid phase skew.
BPFfeature = np.angle(hilbert(BPFfeature,axis=0)) # Compute the phase of the envelope. [rad] range:(-pi,pi]
return BPFfeature
def function_feature_frequency(signal):
print('Sin implementar. Devuelve 0')
return 0
def function_feature_phoffreq(signal):
print('Sin implementar. Devuelve 0')
return 0
LIST_FEATURES = {'phase':function_feature_phase,
'amplitude':function_feature_amplitude,
'phaseofamplitude':function_feature_phofamp,
'frequency':function_feature_frequency,
'phaseoffrequency':function_feature_phoffreq}
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
def function_comodulogramFeature(signal,fcfg,fs,indSettlingExt):
"""
Description:
In this function we implement the extraction of the phase/amplitude/frequency
time series from the input signals. The input signals are supposed to be
previously Band-Pass Filtered signals around the frequency bands of interest.
Inputs:
- signal. Numeric array (Ns x Nf x NBw)
Band-Pass Filtered signals. Notation:
Ns: Number of samples.
Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])
NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])
- fcfg. Structure. Parameters of the Frequency Band in "x(y)" axis.
- 'start': Numeric value. Start frequency [Hz].
- 'end': Numeric value. End frequency [Hz].
- 'res': Numeric value. Frequency resolution [Hz].
Define the frequency separation between two consecutive BPFs.
- 'BPFcfg': Structure.
Band-Pass Filter configuration for the comodulogram's "x(y)" axis.
- 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of
frequency corresponding to the "x(y)" axis [none].
- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.
The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').
- 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.
*0: Return a NaN.
*1: Return the filtered signals.
- 'Nbins': Integer value.
Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram.
- 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.
*0: Do not truncate the signal "x" to obtain the same number of cycles.
*1: Process the same number of cycles of signal "x" for all "fX" frequencies.
- fs: Numeric value. Sampling rate [Hz].
- indSettlingExt: Integer value.
External index for cutting out the transient response of the BPFs.
If "indSettlingExt" is empty or NaN, the index for the longest settling time is used.
Outputs:
- BPFfeature: Numeric array (Ns x NF x NBw)
Phase/amplitud/frequency time series for the "x" or "y" axis of the comodulogram
- croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw)
Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)
"""
# %Argument completion ------------------------------------------------------
# if (nargin < 4)||isempty(signal)...
# ||isempty(fcfg)...
# ||isempty(fs)...
# ||isempty(indSettlingExt),...
# error('MATLAB:function_comodulogramFeature','Input argument error.');
# end
if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():
# Compute the cutoff frequencies.
fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']) / 2 # Arithmetic mean.
#%fcfg.BPFcfg.f0 = sqrt(fcfg.BPFcfg.f1 * fcfg.BPFcfg.f2); %Geometric mean.
#%Ref: https://en.wikipedia.org/wiki/Center_frequency
fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']
#elif ~isfield(fcfg.BPFcfg, 'f0') || ~isfield(fcfg.BPFcfg, 'Bw'),
# error('MATLAB:function_comodulogramFeature','Error in the BPF configuration (BPFcfg).');
# Check the input arguments ------------------------------------------------
# assert(max(size(signal))==size(signal,1), 'Input argument error in function "function_comodulogramFeature": The signal must be a column array.');
# assert(isstruct(fcfg), 'Input argument error in function "function_comodulogramFeature": fcfg must be a structure array.');
# assert(isstruct(fcfg.BPFcfg), 'Input argument error in function "function_comodulogramFeature": BPFcfg structure not found.');
# assert(isnumeric(indSettlingExt)&&(indSettlingExt>0)&&(length(indSettlingExt)==1),...
# 'Input argument error in function "function_comodulogramBPFandFeature": The value for "indSettlingExt" is not valid.');
# Default values of the outputs --------------------------------------------
croppedSignal = []
Nf = np.size(fcfg['BPFcfg']['f0']) # Number of frequencies.
NBw = np.size(fcfg['BPFcfg']['Bw']) # Number of Bandwidths.
fnyq = fs/2 # [Hz] Nyquist frequency.
Ns = np.shape(signal)[0] # Compute the number of samples of the input signal.
Ns_cropped = Ns - 2*(indSettlingExt-1) # Compute the final length of the time series after clipping.
# --------------------------------------------------------------------------
# Feature extraction -------------------------------------------------------
BPFfeature = np.zeros((Ns_cropped, Nf, NBw)) # Memory pre-allocation for speed up the loop.
if fcfg['saveBPFsignal']:
croppedSignal = np.zeros((Ns_cropped, Nf, NBw))
for ii in range(NBw): # Loop for Bandwidths.
signal_local = signal[:,:,ii]
# Selection and computation of features --------------------------------
feature = fcfg['lookAt'].lower()
function_feature = LIST_FEATURES.get(feature, lambda: "Invalid method")
BPFfeature_local= function_feature(signal_local)
# ----------------------------------------------------------------------
BPFfeature_local = BPFfeature_local[indSettlingExt-1:BPFfeature_local.shape[0]-(indSettlingExt-1),:] # We remove the transient due to the Hilbert transform.
BPFfeature[:,:,ii] = BPFfeature_local
if fcfg['saveBPFsignal']:
# Cutting out the transient response AFTER the phase/amplitude/frequency extraction.
croppedSignal[:,:,ii] = signal_local[indSettlingExt-1:signal_local.shape[0]-(indSettlingExt-1),:]
# ----------------------------------------------------------------------
# This is required in the case of a single Bandwidth. (VER)
# if NBw==1:
# BPFfeature = np.squeeze(BPFfeature)
# croppedSignal = np.squeeze(croppedSignal)
return BPFfeature, croppedSignal
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
def function_PLV(x,y, wx, wy, CFCcfg):
"""
Description:
In this function we compute the Phase Locking Values.
Refs:
[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m
[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198
[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf
[4] Phase Locking from Noisy Data (Tass, 1998).pdf
Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf
Inputs:
- x: Numeric array (Nsamples x NfX).
Data for the comodulogram's "x" axis (matrix: samples x NfX).
- y: Numeric array (Nsamples x NfY x NfX).
Data for the comodulogram's "y" axis (matrix: samples x NfY x NfX).
- wx: Numeric array (Nsamples x NfX).
Weights related to the comodulogram's "x" axis (matrix: samples x NfX).
- wy: Numeric array (Nsamples x NfY x NfX).
Weights related to the comodulogram's "y" axis (matrix: samples x NfY x NfX).
- CFCcfc: structure.
Parameters of the comodulogram (structure array)
- 'fXcfg': structure.
Parameters of the Frequency Band in "x" axis.
- 'fYcfg': structure.
Parameters of the Frequency Band in "y" axis.
-'start': Numeric value. Start frequency [Hz].
-'end': Numeric value. End frequency [Hz].
-'res': Numeric value. Frequency resolution.
Define the frequency separation between two consecutive BPFs.
-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.
-'lookAt': String.
Parameter of the signal (phase/amplitude) observed in the range of
frequency [none] (string).
-'n': Int value. Harmonic number for detection of phase locking.
-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.
0 - Return a NaN.
1 - Return the filtered signals.
-'Nbins': Int value.
Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram.
-'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.
0 - Do not truncate the signal "x" to obtain the same number
of cycles.
1 - Process the same number of cycles of signal "x" for all
"fX" frequencies.
- 'CFCmethod': String.
Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.
- 'verbose': Boolean. Display flag.
- 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}.
Method by which the surrogated time series are built.
- 'Nper': Int value.
Number of permutations. It defines the number of surrogate histograms per
repetition. It is worth noting that in each repetition, "Nper" surrogate
histograms of size "Nbins x NfY x NfX" are stored in memory (RAM).
- 'Nrep': Int value.
Number of repetitions. In each repetition a ".mat" file is written to disk,
containing "Nper" surrogate histograms of size "Nbins x NfY x NfX".
As a consequence, the final number of surrogate histograms is "Nper x Nrep".
- 'Pvalue': Numeric value. P-value for the statistically significant level.
- 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.
- 'fs': Numeric value. Sampling rate [Hz].
Outputs:
- PLV: Numeric array (NfY x NfX).
Phase Locking Value.
- wxPLV: Numeric array (NfY x NfX).
Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).
- wyPLV: Numeric array (NfY x NfX).
Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).
NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])
NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])
"""
## Argument completion
# if (nargin < 5)||isempty(x)||isempty(y)||isempty(CFCcfg),...
# error('MATLAB:function_PLV','Input argument error.');
# end
## Check the input arguments
# assert(isstruct(CFCcfg), 'Input argument error in function "function_PLV": CFCcfg must be a structure array.');
# if ~isfield(CFCcfg.fXcfg, 'n')||isempty(CFCcfg.fXcfg.n)||isnan(CFCcfg.fXcfg.n),
# CFCcfg.fXcfg.n = 1; %Default value.
# warning('MATLAB:function_PLV', ['"CFCcfg.fXcfg.n" is not specified, the default value is used: CFCcfg.fXcfg.n = ',...
# num2str(CFCcfg.fXcfg.n)]);
# end
# if ~isfield(CFCcfg.fYcfg, 'n')||isempty(CFCcfg.fYcfg.n)||isnan(CFCcfg.fYcfg.n),
# CFCcfg.fYcfg.n = 1; %Default value.
# warning('MATLAB:function_PLV', ['"CFCcfg.fYcfg.n" is not specified, the default value is used: CFCcfg.fYcfg.n = ',...
# num2str(CFCcfg.fYcfg.n)]);
# end
# assert(length(size(x))==2 &&...
# size(x,2)==length(CFCcfg.fXcfg.BPFcfg.f0) &&...
# size(x,1)==max(size(x)),...
# 'Input argument error in function "function_PLV": Wrong shape of the input matrix "x".');
# assert(length(size(y))<=3 &&...
# size(y,3)==length(CFCcfg.fXcfg.BPFcfg.f0) &&...
# size(y,2)==length(CFCcfg.fYcfg.BPFcfg.f0) &&...
# size(y,1)==max(size(y)),...
# 'Input argument error in function "function_PLV": Wrong shape of the input matrix "y".');
# if ~isempty(wx),
# assert(isequal(size(wx),size(x)),...
# 'Input argument error in function "function_PLV": Wrong shape of the input matrix "wx".');
# end
# if ~isempty(wy),
# assert(isequal(size(wy),size(y)),...
# 'Input argument error in function "function_PLV": Wrong shape of the input matrix "wy".');
# end
# Default values of the outputs ----------------------------------
wxPLV = []
wyPLV = []
# ----------------------------------------------------------------
# Parameters -----------------------------------------------------
NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0']) # Compute the length of the frequency vectors.
NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0']) # Compute the length of the frequency vectors.
Ns = np.shape(x)[0] # Number of samples
nX = CFCcfg['fXcfg']['n'] # Compute the harmonic number for detection of nX:nY phase locking
nY = CFCcfg['fYcfg']['n'] # Compute the harmonic number for detection of nX:nY phase locking
# ----------------------------------------------------------------
# Compute the modulation index "PLV" ---------------------------------------
PLV = np.zeros((NfY,NfX),dtype=complex) # Memory pre-allocation for speed up the loop.
for ii in range(NfY): # Loop across the "y" frequencies.
PLV[ii,:] = np.sum(np.exp(1j * (nX*x - nY*y[:,ii,:])),0) / Ns
# ---------------------------------------------------------------------------
# # Compute the modulation index "wxPLV" -------------------------------------
# if ~isempty(wx):
# wxPLV = np.zeros((NfY,NfX)) # Memory pre-allocation for speed up the loop.
# for ii in range(NfY): # Loop across the "y" frequencies.
# wxPLV(ii,:) = sum(wx.*exp(1j*(nX*x-nY*squeeze(y(:,ii,:)))),1) / Ns;
# # -------------------------------------------------------------------------
# # Compute the modulation index "wyPLV" -------------------------------------
# if ~isempty(wy):
# wyPLV = np.zeros((NfY,NfX)) # Memory pre-allocation for speed up the loop.
# for ii in range(NfY): # Loop across the "y" frequencies.
# wyPLV(ii,:) = sum(squeeze(wy(:,ii,:)).*exp(1j*(nX*x-nY*squeeze(y(:,ii,:)))),1) / Ns;
# # -------------------------------------------------------------------------
return PLV, wxPLV, wyPLV
# --------------------------------------------------------------------------------------
# --------------------------------------------------------------------------------------
|
[
"# -*- coding: utf-8 -*-\n\"\"\"\nCreated on September 2019\n@authors: Osvaldo M Velarde - Damián Dellavale - Javier Velez\n@title: Module - \"comodulogram\"\n\"\"\"\n\nimport numpy as np\n\nfrom sklearn.preprocessing import scale\nfrom scipy.signal import hilbert\n\nimport filtering\n\ndef function_setCFCcfg(CFCin):\n\n\t\"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n\n\t# Default values of the outputs --------------------------------------------------\n\tfXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin['fXres'],\n\t\t \t 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n\t\t \t 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'],\n\t\t \t 'sameNumberOfCycles': CFCin['sameNumberOfCycles'],\n\t\t \t 'saveBPFsignal': CFCin['saveBPFsignal']}\n\n\tfYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin['fYres'],\n\t\t\t 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n\t\t \t 'n': CFCin['nY'],\n\t\t \t 'saveBPFsignal': CFCin['saveBPFsignal']}\n\n\tif fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'].lower == 'phaseoffrequency':\n\t\tfYcfg['LPFcfg'] = CFCin['LPFcfg']\n\t# --------------------------------------------------------------------------------\n\n\t# Compute the start frequency for \"x\" axis taking into account the bandwidth of the band-pass filter.\n\tif CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw']/2:\n\t\tfXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw']/2\n\n\t# Compute the vector of frequency for the \"x\" axis ------------------------------- \n\tfXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'],fXcfg['end'],np.ceil((fXcfg['end']-fXcfg['start'])/fXcfg['res']))\n\t#np.arange(fXcfg['start'],fXcfg['end']+fXcfg['res'],fXcfg['res'])\n\n\t# Compute the adaptive number of BPFs connected in series ------------------------\n\tif 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times'])>1:\n\t\tfXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],fXcfg['BPFcfg']['times'][-1],len(fXcfg['BPFcfg']['times']))\n\n\t# Compute the bandwidth for the BPFs in the \"y\" axis ----------------------------- \n\tif type(fYcfg['BPFcfg']['Bw']*1.0) == float: \t#Constant bandwidth\t\t\t\n\t\tfYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw']*np.ones(np.shape(fXcfg['BPFcfg']['f0']))\n\telse:\t\t\t\t\t\t\t\t\t\t\t# Adaptive\n\t\tfYcfg['BPFcfg']['Bw'] = 2*fXcfg['BPFcfg']['f0']\n\n\t# Compute the start frequency for \"y\" axis taking into account the bandwidth of the band-pass filter.\n\tif fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0]/2:\n\t\tfYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0]/2\n\n\t# Compute the vector of frequency for the \"y\" axis --------------------------------\n\tfYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'],fYcfg['end'],np.ceil((fYcfg['end']-fYcfg['start'])/fYcfg['res'])) \n\t#fYcfg['BPFcfg']['f0'] = np.arange(fYcfg['start'],fYcfg['end']+fYcfg['res'],fYcfg['res']) \n\n\t# Compute the adaptive number of BPFs connected in series -------------------------\n\tif 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times'])>1:\n\t\tfYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],fYcfg['BPFcfg']['times'][-1],len(fYcfg['BPFcfg']['times']))\n\n\t# Compute the output structure ----------------------------------------------------\n\tCFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg,\n\t \t\t \t'CFCmethod': CFCin['CFCmethod'],\n\t\t\t\t'verbose': CFCin['verbose'], 'perMethod': CFCin['perMethod'],\n\t\t\t\t'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n\t\t\t\t'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n\t\t\t\t'fs': CFCin['fs']}\n\n\treturn CFCout\n\n# --------------------------------------------------------------------------------------\n# --------------------------------------------------------------------------------------\n\nFILTERS_SWITCHER = {'function_FDF': filtering.function_FDF,\n 'function_eegfilt':filtering.function_eegfilt,\n 'function_butterBPF':filtering.function_butterBPF} \n\n# --------------------------------------------------------------------------------------\n# --------------------------------------------------------------------------------------\n\ndef function_comodulogramBPF(signal,BPFcfg,fs,indSettlingExt):\n\n \"\"\"\n Description:\n In this function we implement the Band-Pass Filtering of the input signal.\n The input signal is supposed to be a raw (unfiltered) time series.\n\n\n Inputs:\n - signal: Numeric array (Nsamples x 1). Data.\n - BPFcfg: Structure. \n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'function': string {'function_butterBPF', 'function_eegfilt', 'function_FDF'}\n It specifies the function for the Band-Pass Filter:\n * 'function_butterBPF', a BPF IIR filter is implemented using a series connection of a\n High-Pass followed by a Low-Pass Butterworth filters.\n * 'function_eegfilt', a BPF FIR filter is implemented using the \"eegfilt.m\" function from\n EEGLAB toolbox.\n * 'function_FDF', a Frequency Domain Filtering is implemented using a window function. \n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Int value. External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - indSettlingMax: Int value. Index corresponding to the longest transient response of the BPFs.\n - BPFsignal: Numeric array (Nsamples x Nf x NBw). Band-Pass Filtered signals.\n where: Ns = np.shape[signal,0]. Number of samples.\n Nf = len(fcfg['BPFcfg']['f0']). Number of frequencies.\n NBw = len(fcfg['BPFcfg']['Bw']). Number of Bandwidths.\n \"\"\"\n\n # Argument completion ------------------------------------------------------\n # if (nargin < 4)||isempty(signal)...\n # ||isempty(BPFcfg)...\n # ||isempty(fs)...\n # ||isempty(indSettlingExt),...\n # error('MATLAB:function_comodulogramBPF','Input argument error.');\n # end\n\n if 'f1' in BPFcfg.keys() and 'f2' in BPFcfg.keys():\n # Compute the cutoff frequencies.\n BPFcfg['f0'] = (BPFcfg['f1'] + BPFcfg['f2']) / 2 # Arithmetic mean.\n # BPFcfg['f0'] = np.sqrt(BPFcfg['f1'] * BPFcfg['f2']) %Geometric mean.\n # %Ref: https://en.wikipedia.org/wiki/Center_frequency\n BPFcfg['Bw'] = BPFcfg['f2'] - BPFcfg['f1']\n #elseif ~isfield(BPFcfg, 'f0') || ~isfield(BPFcfg, 'Bw'),\n # error('MATLAB:function_comodulogramBPF','Error in the BPF configuration (BPFcfg).');\n\n # --------------------------------------------------------------------------\n\n # Check the input arguments ------------------------------------------------\n #assert(size(signal,2)==1, 'Input argument error in function \"function_comodulogramBPF\": The signal must be a column array.');\n #assert(isstruct(BPFcfg), 'Input argument error in function \"function_comodulogramBPF\": BPFcfg must be a structure array.');\n #assert(isnumeric(indSettlingExt)&&(indSettlingExt>0)&&(length(indSettlingExt)==1),...\n # 'Input argument error in function \"function_comodulogramBPFandFeature\": The value for \"indSettlingExt\" is not valid.');\n # --------------------------------------------------------------------------\n\n # Default values of the outputs --------------------------------------------\n Nf = np.size(BPFcfg['f0']) # Number of frequencies.\n NBw = np.size(BPFcfg['Bw']) # Number of Bandwidths.\n fnyq = fs/2 # [Hz] Nyquist frequency.\n Ncycle = np.round(fs / np.atleast_1d(BPFcfg['f0'])[0]) # Compute the samples per period for the minimum frequency. \n Ns = np.shape(signal)[0] # Compute the number of samples of the input signal.\n Ns_cropped = Ns - 2*(indSettlingExt-1) # Compute the final length of the time series after clipping.\n\n # if Ncycle >= Ns_cropped:\n # error('MATLAB:function_comodulogramBPF',...\n # 'The time series is too short: it does not include at least one period of the minimum frequency.')\n\n # --------------------------------------------------------------------------\n\n # Initializes the index corresponding to the maximum settling time with the external value.\n indSettlingMax = indSettlingExt\n # --------------------------------------------------------------------------\n\n ## Band-Pass Filtering -----------------------------------------------------\n \n BPFsignal = np.zeros((Ns_cropped, Nf, NBw)) # Memory pre-allocation.\n\n for ii in range(NBw): # Loop for Bandwidths.\n BPFsignal_local = np.zeros((Ns, Nf)) # Memory pre-allocation.\n indSettling = np.zeros((1, Nf)) # Memory pre-allocation.\n\n for jj in range(Nf): # Loop for frequencies.\n BPFcfg_local = BPFcfg # Extract the parameters for the BPF configuration.\n BPFcfg_local['Bw'] = np.atleast_1d(BPFcfg['Bw'])[ii]\n BPFcfg_local['f0'] = np.atleast_1d(BPFcfg['f0'])[jj] \n\n # Do not compute the cases in which,\n # 1) the lower cutoff frequency is lesser than or equal to zero.\n # 2) the higher cutoff frequency is greater than or equal to one.\n # Ref: Lega 2014 PAC in human hippocampus.pdf\n\n if (BPFcfg_local['f0']-BPFcfg_local['Bw']/2)<=fs/Ns or (BPFcfg_local['f0']+BPFcfg_local['Bw']/2)/fnyq>=1:\n continue\n # -------------------------------------------------------------------\n\n filter_function = FILTERS_SWITCHER.get(BPFcfg_local['function'], lambda: \"Invalid method\") # Switch for filter selection.\n BPFsignal_localjj, indSettling[jj], _ , _ = filter_function(signal, BPFcfg_local, fs)\n BPFsignal_local[:,jj] = np.real(np.squeeze(BPFsignal_localjj))\n \n # VER: Para el caso butterBPF, se ejecutaba esto antes de filter_function\n # case 'function_butterBPF', # Band-Pass Filter (IIR) using a series connection of a High-Pass followed by a Low-Pass Butterworth filters.\n # if length(BPFcfg_local.times)>1, %Adaptive number of BPFs connected in series.\n # BPFcfg_local.times = BPFcfg.times(jj);\n \n # -----------------------------------------------------------------------\n \n # Cut out the transient response of the BPFs -----------------------------\n indSettlingMax = max([indSettling, indSettlingMax]) # Compute the index for the largest settling time.\n\n if indSettlingMax > indSettlingExt: # Compare the internal and external settling time indices.\n print('Un msj no implementado- function_comodulogramBPF_v1_225')\n #warning('MATLAB:function_comodulogramBPF',...\n # 'The transient response have not completely removed using \"indSettlingExt\":');\n #display(['Relative difference = ' num2str(100*(indSettlingExt-indSettlingMax)/indSettlingMax) '%.']); \n\n BPFsignal_local = BPFsignal_local[indSettlingExt-1:BPFsignal_local.shape[0]-(indSettlingExt-1),:] # Cutting out the BPFs' transient response. \n # -----------------------------------------------------------------------\n\n BPFsignal[:,:,ii] = BPFsignal_local\n\n #This is required in the case of a single Bandwidth.\n #if NBw==1:\n # BPFsignal = np.squeeze(BPFsignal)\n\n return BPFsignal, indSettlingMax\n\n# --------------------------------------------------------------------------------------\n# --------------------------------------------------------------------------------------\n\n# --------------------------------------------------------------------------------------\n# --------------------------------------------------------------------------------------\n\ndef function_feature_phase(signal):\n \"\"\" \n Description:\n Compute the phase of the z-scored BPF signal.\n\n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed.\n this ensures that phase values are not limited in range.\n \n Ref: \n Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n Angle [rad] in (-pi,pi]\n \"\"\"\n\n return np.angle(hilbert(scale(signal),axis=0))\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n\n return np.abs(hilbert(signal,axis=0))\n\ndef function_feature_phofamp(signal):\n \"\"\" \n Description:\n Phase of the signal's amplitude envelope.\n \n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed;\n this ensures that phase values are not limited in range.\n \n Ref: Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n \"\"\"\n\n BPFfeature = np.abs(hilbert(signal,axis=0)) # Compute the amplitudes (signal envelope).\n BPFfeature = scale(BPFfeature) # Normalization in order to avoid phase skew. \n BPFfeature = np.angle(hilbert(BPFfeature,axis=0)) # Compute the phase of the envelope. [rad] range:(-pi,pi]\n\n return BPFfeature\n\ndef function_feature_frequency(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\nLIST_FEATURES = {'phase':function_feature_phase,\n 'amplitude':function_feature_amplitude,\n 'phaseofamplitude':function_feature_phofamp,\n 'frequency':function_feature_frequency,\n 'phaseoffrequency':function_feature_phoffreq}\n\n# --------------------------------------------------------------------------------------\n# --------------------------------------------------------------------------------------\n\ndef function_comodulogramFeature(signal,fcfg,fs,indSettlingExt):\n\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n\n # %Argument completion ------------------------------------------------------\n # if (nargin < 4)||isempty(signal)...\n # ||isempty(fcfg)...\n # ||isempty(fs)...\n # ||isempty(indSettlingExt),...\n # error('MATLAB:function_comodulogramFeature','Input argument error.');\n # end\n\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n # Compute the cutoff frequencies.\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']) / 2 # Arithmetic mean.\n #%fcfg.BPFcfg.f0 = sqrt(fcfg.BPFcfg.f1 * fcfg.BPFcfg.f2); %Geometric mean.\n #%Ref: https://en.wikipedia.org/wiki/Center_frequency\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n #elif ~isfield(fcfg.BPFcfg, 'f0') || ~isfield(fcfg.BPFcfg, 'Bw'),\n # error('MATLAB:function_comodulogramFeature','Error in the BPF configuration (BPFcfg).');\n\n # Check the input arguments ------------------------------------------------\n # assert(max(size(signal))==size(signal,1), 'Input argument error in function \"function_comodulogramFeature\": The signal must be a column array.');\n # assert(isstruct(fcfg), 'Input argument error in function \"function_comodulogramFeature\": fcfg must be a structure array.');\n # assert(isstruct(fcfg.BPFcfg), 'Input argument error in function \"function_comodulogramFeature\": BPFcfg structure not found.');\n # assert(isnumeric(indSettlingExt)&&(indSettlingExt>0)&&(length(indSettlingExt)==1),...\n # 'Input argument error in function \"function_comodulogramBPFandFeature\": The value for \"indSettlingExt\" is not valid.');\n\n\n # Default values of the outputs --------------------------------------------\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0']) # Number of frequencies.\n NBw = np.size(fcfg['BPFcfg']['Bw']) # Number of Bandwidths.\n fnyq = fs/2 # [Hz] Nyquist frequency.\n Ns = np.shape(signal)[0] # Compute the number of samples of the input signal.\n Ns_cropped = Ns - 2*(indSettlingExt-1) # Compute the final length of the time series after clipping.\n # --------------------------------------------------------------------------\n \n # Feature extraction -------------------------------------------------------\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw)) # Memory pre-allocation for speed up the loop.\n\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n\n for ii in range(NBw): # Loop for Bandwidths.\n\n signal_local = signal[:,:,ii]\n \n # Selection and computation of features --------------------------------\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda: \"Invalid method\")\n BPFfeature_local= function_feature(signal_local)\n\n # ----------------------------------------------------------------------\n\n BPFfeature_local = BPFfeature_local[indSettlingExt-1:BPFfeature_local.shape[0]-(indSettlingExt-1),:] # We remove the transient due to the Hilbert transform. \n BPFfeature[:,:,ii] = BPFfeature_local\n\n if fcfg['saveBPFsignal']:\n # Cutting out the transient response AFTER the phase/amplitude/frequency extraction.\n croppedSignal[:,:,ii] = signal_local[indSettlingExt-1:signal_local.shape[0]-(indSettlingExt-1),:] \n\n # ----------------------------------------------------------------------\n # This is required in the case of a single Bandwidth. (VER)\n # if NBw==1:\n # BPFfeature = np.squeeze(BPFfeature)\n # croppedSignal = np.squeeze(croppedSignal)\n\n return BPFfeature, croppedSignal\n\n# --------------------------------------------------------------------------------------\n# --------------------------------------------------------------------------------------\n\ndef function_PLV(x,y, wx, wy, CFCcfg):\n\t\"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n\n\t## Argument completion\n\n\t# if (nargin < 5)||isempty(x)||isempty(y)||isempty(CFCcfg),...\n\t# error('MATLAB:function_PLV','Input argument error.');\n\t# end\n\n\t## Check the input arguments\n\t# assert(isstruct(CFCcfg), 'Input argument error in function \"function_PLV\": CFCcfg must be a structure array.');\n\n\t# if ~isfield(CFCcfg.fXcfg, 'n')||isempty(CFCcfg.fXcfg.n)||isnan(CFCcfg.fXcfg.n),\n\t# CFCcfg.fXcfg.n = 1; %Default value.\n\t# warning('MATLAB:function_PLV', ['\"CFCcfg.fXcfg.n\" is not specified, the default value is used: CFCcfg.fXcfg.n = ',...\n\t# num2str(CFCcfg.fXcfg.n)]); \n\t# end \n\n\t# if ~isfield(CFCcfg.fYcfg, 'n')||isempty(CFCcfg.fYcfg.n)||isnan(CFCcfg.fYcfg.n),\n\t# CFCcfg.fYcfg.n = 1; %Default value.\n\t# warning('MATLAB:function_PLV', ['\"CFCcfg.fYcfg.n\" is not specified, the default value is used: CFCcfg.fYcfg.n = ',...\n\t# num2str(CFCcfg.fYcfg.n)]); \n\t# end \n\n\t# assert(length(size(x))==2 &&...\n\t# size(x,2)==length(CFCcfg.fXcfg.BPFcfg.f0) &&...\n\t# size(x,1)==max(size(x)),...\n\t# 'Input argument error in function \"function_PLV\": Wrong shape of the input matrix \"x\".');\n\n\t# assert(length(size(y))<=3 &&...\n\t# size(y,3)==length(CFCcfg.fXcfg.BPFcfg.f0) &&...\n\t# size(y,2)==length(CFCcfg.fYcfg.BPFcfg.f0) &&...\n\t# size(y,1)==max(size(y)),...\n\t# 'Input argument error in function \"function_PLV\": Wrong shape of the input matrix \"y\".');\n\n\t# if ~isempty(wx),\n\t# assert(isequal(size(wx),size(x)),...\n\t# 'Input argument error in function \"function_PLV\": Wrong shape of the input matrix \"wx\".');\n\t# end\n\n\t# if ~isempty(wy),\n\t# assert(isequal(size(wy),size(y)),...\n\t# 'Input argument error in function \"function_PLV\": Wrong shape of the input matrix \"wy\".');\n\t# end\n\n\t# Default values of the outputs ----------------------------------\n\twxPLV = []\n\twyPLV = []\n\t# ----------------------------------------------------------------\n\n\t# Parameters -----------------------------------------------------\n\tNfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0']) # Compute the length of the frequency vectors.\n\tNfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0']) # Compute the length of the frequency vectors.\n\tNs = np.shape(x)[0]\t# Number of samples\n\tnX = CFCcfg['fXcfg']['n'] # Compute the harmonic number for detection of nX:nY phase locking\n\tnY = CFCcfg['fYcfg']['n'] # Compute the harmonic number for detection of nX:nY phase locking\n\t# ----------------------------------------------------------------\n\n\t# Compute the modulation index \"PLV\" ---------------------------------------\n\tPLV = np.zeros((NfY,NfX),dtype=complex) # Memory pre-allocation for speed up the loop. \n\n\tfor ii in range(NfY): # Loop across the \"y\" frequencies.\n\t\tPLV[ii,:] = np.sum(np.exp(1j * (nX*x - nY*y[:,ii,:])),0) / Ns\n\t# ---------------------------------------------------------------------------\n\n\t# # Compute the modulation index \"wxPLV\" -------------------------------------\n\t# if ~isempty(wx):\n\t# \twxPLV = np.zeros((NfY,NfX)) # Memory pre-allocation for speed up the loop.\n\t# \tfor ii in range(NfY): # Loop across the \"y\" frequencies.\n\t# \t\twxPLV(ii,:) = sum(wx.*exp(1j*(nX*x-nY*squeeze(y(:,ii,:)))),1) / Ns;\n\t# # -------------------------------------------------------------------------\n\n\t# # Compute the modulation index \"wyPLV\" -------------------------------------\n\t# if ~isempty(wy):\n\t# \twyPLV = np.zeros((NfY,NfX)) # Memory pre-allocation for speed up the loop.\n\t# \tfor ii in range(NfY): # Loop across the \"y\" frequencies.\n\t# \t\twyPLV(ii,:) = sum(squeeze(wy(:,ii,:)).*exp(1j*(nX*x-nY*squeeze(y(:,ii,:)))),1) / Ns;\n\t# # -------------------------------------------------------------------------\n\n\treturn PLV, wxPLV, wyPLV\n\n# --------------------------------------------------------------------------------------\n# --------------------------------------------------------------------------------------",
"<docstring token>\nimport numpy as np\nfrom sklearn.preprocessing import scale\nfrom scipy.signal import hilbert\nimport filtering\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\nFILTERS_SWITCHER = {'function_FDF': filtering.function_FDF,\n 'function_eegfilt': filtering.function_eegfilt, 'function_butterBPF':\n filtering.function_butterBPF}\n\n\ndef function_comodulogramBPF(signal, BPFcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the Band-Pass Filtering of the input signal.\n The input signal is supposed to be a raw (unfiltered) time series.\n\n\n Inputs:\n - signal: Numeric array (Nsamples x 1). Data.\n - BPFcfg: Structure. \n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'function': string {'function_butterBPF', 'function_eegfilt', 'function_FDF'}\n It specifies the function for the Band-Pass Filter:\n * 'function_butterBPF', a BPF IIR filter is implemented using a series connection of a\n High-Pass followed by a Low-Pass Butterworth filters.\n * 'function_eegfilt', a BPF FIR filter is implemented using the \"eegfilt.m\" function from\n EEGLAB toolbox.\n * 'function_FDF', a Frequency Domain Filtering is implemented using a window function. \n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Int value. External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - indSettlingMax: Int value. Index corresponding to the longest transient response of the BPFs.\n - BPFsignal: Numeric array (Nsamples x Nf x NBw). Band-Pass Filtered signals.\n where: Ns = np.shape[signal,0]. Number of samples.\n Nf = len(fcfg['BPFcfg']['f0']). Number of frequencies.\n NBw = len(fcfg['BPFcfg']['Bw']). Number of Bandwidths.\n \"\"\"\n if 'f1' in BPFcfg.keys() and 'f2' in BPFcfg.keys():\n BPFcfg['f0'] = (BPFcfg['f1'] + BPFcfg['f2']) / 2\n BPFcfg['Bw'] = BPFcfg['f2'] - BPFcfg['f1']\n Nf = np.size(BPFcfg['f0'])\n NBw = np.size(BPFcfg['Bw'])\n fnyq = fs / 2\n Ncycle = np.round(fs / np.atleast_1d(BPFcfg['f0'])[0])\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n indSettlingMax = indSettlingExt\n BPFsignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n BPFsignal_local = np.zeros((Ns, Nf))\n indSettling = np.zeros((1, Nf))\n for jj in range(Nf):\n BPFcfg_local = BPFcfg\n BPFcfg_local['Bw'] = np.atleast_1d(BPFcfg['Bw'])[ii]\n BPFcfg_local['f0'] = np.atleast_1d(BPFcfg['f0'])[jj]\n if BPFcfg_local['f0'] - BPFcfg_local['Bw'] / 2 <= fs / Ns or (\n BPFcfg_local['f0'] + BPFcfg_local['Bw'] / 2) / fnyq >= 1:\n continue\n filter_function = FILTERS_SWITCHER.get(BPFcfg_local['function'],\n lambda : 'Invalid method')\n BPFsignal_localjj, indSettling[jj], _, _ = filter_function(signal,\n BPFcfg_local, fs)\n BPFsignal_local[:, jj] = np.real(np.squeeze(BPFsignal_localjj))\n indSettlingMax = max([indSettling, indSettlingMax])\n if indSettlingMax > indSettlingExt:\n print('Un msj no implementado- function_comodulogramBPF_v1_225')\n BPFsignal_local = BPFsignal_local[indSettlingExt - 1:\n BPFsignal_local.shape[0] - (indSettlingExt - 1), :]\n BPFsignal[:, :, ii] = BPFsignal_local\n return BPFsignal, indSettlingMax\n\n\ndef function_feature_phase(signal):\n \"\"\" \n Description:\n Compute the phase of the z-scored BPF signal.\n\n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed.\n this ensures that phase values are not limited in range.\n \n Ref: \n Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n Angle [rad] in (-pi,pi]\n \"\"\"\n return np.angle(hilbert(scale(signal), axis=0))\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\ndef function_feature_phofamp(signal):\n \"\"\" \n Description:\n Phase of the signal's amplitude envelope.\n \n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed;\n this ensures that phase values are not limited in range.\n \n Ref: Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n \"\"\"\n BPFfeature = np.abs(hilbert(signal, axis=0))\n BPFfeature = scale(BPFfeature)\n BPFfeature = np.angle(hilbert(BPFfeature, axis=0))\n return BPFfeature\n\n\ndef function_feature_frequency(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\nLIST_FEATURES = {'phase': function_feature_phase, 'amplitude':\n function_feature_amplitude, 'phaseofamplitude':\n function_feature_phofamp, 'frequency': function_feature_frequency,\n 'phaseoffrequency': function_feature_phoffreq}\n\n\ndef function_comodulogramFeature(signal, fcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']\n ) / 2\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0'])\n NBw = np.size(fcfg['BPFcfg']['Bw'])\n fnyq = fs / 2\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw))\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n signal_local = signal[:, :, ii]\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda : 'Invalid method'\n )\n BPFfeature_local = function_feature(signal_local)\n BPFfeature_local = BPFfeature_local[indSettlingExt - 1:\n BPFfeature_local.shape[0] - (indSettlingExt - 1), :]\n BPFfeature[:, :, ii] = BPFfeature_local\n if fcfg['saveBPFsignal']:\n croppedSignal[:, :, ii] = signal_local[indSettlingExt - 1:\n signal_local.shape[0] - (indSettlingExt - 1), :]\n return BPFfeature, croppedSignal\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\nFILTERS_SWITCHER = {'function_FDF': filtering.function_FDF,\n 'function_eegfilt': filtering.function_eegfilt, 'function_butterBPF':\n filtering.function_butterBPF}\n\n\ndef function_comodulogramBPF(signal, BPFcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the Band-Pass Filtering of the input signal.\n The input signal is supposed to be a raw (unfiltered) time series.\n\n\n Inputs:\n - signal: Numeric array (Nsamples x 1). Data.\n - BPFcfg: Structure. \n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'function': string {'function_butterBPF', 'function_eegfilt', 'function_FDF'}\n It specifies the function for the Band-Pass Filter:\n * 'function_butterBPF', a BPF IIR filter is implemented using a series connection of a\n High-Pass followed by a Low-Pass Butterworth filters.\n * 'function_eegfilt', a BPF FIR filter is implemented using the \"eegfilt.m\" function from\n EEGLAB toolbox.\n * 'function_FDF', a Frequency Domain Filtering is implemented using a window function. \n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Int value. External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - indSettlingMax: Int value. Index corresponding to the longest transient response of the BPFs.\n - BPFsignal: Numeric array (Nsamples x Nf x NBw). Band-Pass Filtered signals.\n where: Ns = np.shape[signal,0]. Number of samples.\n Nf = len(fcfg['BPFcfg']['f0']). Number of frequencies.\n NBw = len(fcfg['BPFcfg']['Bw']). Number of Bandwidths.\n \"\"\"\n if 'f1' in BPFcfg.keys() and 'f2' in BPFcfg.keys():\n BPFcfg['f0'] = (BPFcfg['f1'] + BPFcfg['f2']) / 2\n BPFcfg['Bw'] = BPFcfg['f2'] - BPFcfg['f1']\n Nf = np.size(BPFcfg['f0'])\n NBw = np.size(BPFcfg['Bw'])\n fnyq = fs / 2\n Ncycle = np.round(fs / np.atleast_1d(BPFcfg['f0'])[0])\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n indSettlingMax = indSettlingExt\n BPFsignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n BPFsignal_local = np.zeros((Ns, Nf))\n indSettling = np.zeros((1, Nf))\n for jj in range(Nf):\n BPFcfg_local = BPFcfg\n BPFcfg_local['Bw'] = np.atleast_1d(BPFcfg['Bw'])[ii]\n BPFcfg_local['f0'] = np.atleast_1d(BPFcfg['f0'])[jj]\n if BPFcfg_local['f0'] - BPFcfg_local['Bw'] / 2 <= fs / Ns or (\n BPFcfg_local['f0'] + BPFcfg_local['Bw'] / 2) / fnyq >= 1:\n continue\n filter_function = FILTERS_SWITCHER.get(BPFcfg_local['function'],\n lambda : 'Invalid method')\n BPFsignal_localjj, indSettling[jj], _, _ = filter_function(signal,\n BPFcfg_local, fs)\n BPFsignal_local[:, jj] = np.real(np.squeeze(BPFsignal_localjj))\n indSettlingMax = max([indSettling, indSettlingMax])\n if indSettlingMax > indSettlingExt:\n print('Un msj no implementado- function_comodulogramBPF_v1_225')\n BPFsignal_local = BPFsignal_local[indSettlingExt - 1:\n BPFsignal_local.shape[0] - (indSettlingExt - 1), :]\n BPFsignal[:, :, ii] = BPFsignal_local\n return BPFsignal, indSettlingMax\n\n\ndef function_feature_phase(signal):\n \"\"\" \n Description:\n Compute the phase of the z-scored BPF signal.\n\n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed.\n this ensures that phase values are not limited in range.\n \n Ref: \n Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n Angle [rad] in (-pi,pi]\n \"\"\"\n return np.angle(hilbert(scale(signal), axis=0))\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\ndef function_feature_phofamp(signal):\n \"\"\" \n Description:\n Phase of the signal's amplitude envelope.\n \n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed;\n this ensures that phase values are not limited in range.\n \n Ref: Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n \"\"\"\n BPFfeature = np.abs(hilbert(signal, axis=0))\n BPFfeature = scale(BPFfeature)\n BPFfeature = np.angle(hilbert(BPFfeature, axis=0))\n return BPFfeature\n\n\ndef function_feature_frequency(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\nLIST_FEATURES = {'phase': function_feature_phase, 'amplitude':\n function_feature_amplitude, 'phaseofamplitude':\n function_feature_phofamp, 'frequency': function_feature_frequency,\n 'phaseoffrequency': function_feature_phoffreq}\n\n\ndef function_comodulogramFeature(signal, fcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']\n ) / 2\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0'])\n NBw = np.size(fcfg['BPFcfg']['Bw'])\n fnyq = fs / 2\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw))\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n signal_local = signal[:, :, ii]\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda : 'Invalid method'\n )\n BPFfeature_local = function_feature(signal_local)\n BPFfeature_local = BPFfeature_local[indSettlingExt - 1:\n BPFfeature_local.shape[0] - (indSettlingExt - 1), :]\n BPFfeature[:, :, ii] = BPFfeature_local\n if fcfg['saveBPFsignal']:\n croppedSignal[:, :, ii] = signal_local[indSettlingExt - 1:\n signal_local.shape[0] - (indSettlingExt - 1), :]\n return BPFfeature, croppedSignal\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\n<assignment token>\n\n\ndef function_comodulogramBPF(signal, BPFcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the Band-Pass Filtering of the input signal.\n The input signal is supposed to be a raw (unfiltered) time series.\n\n\n Inputs:\n - signal: Numeric array (Nsamples x 1). Data.\n - BPFcfg: Structure. \n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'function': string {'function_butterBPF', 'function_eegfilt', 'function_FDF'}\n It specifies the function for the Band-Pass Filter:\n * 'function_butterBPF', a BPF IIR filter is implemented using a series connection of a\n High-Pass followed by a Low-Pass Butterworth filters.\n * 'function_eegfilt', a BPF FIR filter is implemented using the \"eegfilt.m\" function from\n EEGLAB toolbox.\n * 'function_FDF', a Frequency Domain Filtering is implemented using a window function. \n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Int value. External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - indSettlingMax: Int value. Index corresponding to the longest transient response of the BPFs.\n - BPFsignal: Numeric array (Nsamples x Nf x NBw). Band-Pass Filtered signals.\n where: Ns = np.shape[signal,0]. Number of samples.\n Nf = len(fcfg['BPFcfg']['f0']). Number of frequencies.\n NBw = len(fcfg['BPFcfg']['Bw']). Number of Bandwidths.\n \"\"\"\n if 'f1' in BPFcfg.keys() and 'f2' in BPFcfg.keys():\n BPFcfg['f0'] = (BPFcfg['f1'] + BPFcfg['f2']) / 2\n BPFcfg['Bw'] = BPFcfg['f2'] - BPFcfg['f1']\n Nf = np.size(BPFcfg['f0'])\n NBw = np.size(BPFcfg['Bw'])\n fnyq = fs / 2\n Ncycle = np.round(fs / np.atleast_1d(BPFcfg['f0'])[0])\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n indSettlingMax = indSettlingExt\n BPFsignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n BPFsignal_local = np.zeros((Ns, Nf))\n indSettling = np.zeros((1, Nf))\n for jj in range(Nf):\n BPFcfg_local = BPFcfg\n BPFcfg_local['Bw'] = np.atleast_1d(BPFcfg['Bw'])[ii]\n BPFcfg_local['f0'] = np.atleast_1d(BPFcfg['f0'])[jj]\n if BPFcfg_local['f0'] - BPFcfg_local['Bw'] / 2 <= fs / Ns or (\n BPFcfg_local['f0'] + BPFcfg_local['Bw'] / 2) / fnyq >= 1:\n continue\n filter_function = FILTERS_SWITCHER.get(BPFcfg_local['function'],\n lambda : 'Invalid method')\n BPFsignal_localjj, indSettling[jj], _, _ = filter_function(signal,\n BPFcfg_local, fs)\n BPFsignal_local[:, jj] = np.real(np.squeeze(BPFsignal_localjj))\n indSettlingMax = max([indSettling, indSettlingMax])\n if indSettlingMax > indSettlingExt:\n print('Un msj no implementado- function_comodulogramBPF_v1_225')\n BPFsignal_local = BPFsignal_local[indSettlingExt - 1:\n BPFsignal_local.shape[0] - (indSettlingExt - 1), :]\n BPFsignal[:, :, ii] = BPFsignal_local\n return BPFsignal, indSettlingMax\n\n\ndef function_feature_phase(signal):\n \"\"\" \n Description:\n Compute the phase of the z-scored BPF signal.\n\n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed.\n this ensures that phase values are not limited in range.\n \n Ref: \n Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n Angle [rad] in (-pi,pi]\n \"\"\"\n return np.angle(hilbert(scale(signal), axis=0))\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\ndef function_feature_phofamp(signal):\n \"\"\" \n Description:\n Phase of the signal's amplitude envelope.\n \n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed;\n this ensures that phase values are not limited in range.\n \n Ref: Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n \"\"\"\n BPFfeature = np.abs(hilbert(signal, axis=0))\n BPFfeature = scale(BPFfeature)\n BPFfeature = np.angle(hilbert(BPFfeature, axis=0))\n return BPFfeature\n\n\ndef function_feature_frequency(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\n<assignment token>\n\n\ndef function_comodulogramFeature(signal, fcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']\n ) / 2\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0'])\n NBw = np.size(fcfg['BPFcfg']['Bw'])\n fnyq = fs / 2\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw))\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n signal_local = signal[:, :, ii]\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda : 'Invalid method'\n )\n BPFfeature_local = function_feature(signal_local)\n BPFfeature_local = BPFfeature_local[indSettlingExt - 1:\n BPFfeature_local.shape[0] - (indSettlingExt - 1), :]\n BPFfeature[:, :, ii] = BPFfeature_local\n if fcfg['saveBPFsignal']:\n croppedSignal[:, :, ii] = signal_local[indSettlingExt - 1:\n signal_local.shape[0] - (indSettlingExt - 1), :]\n return BPFfeature, croppedSignal\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\n<assignment token>\n<function token>\n\n\ndef function_feature_phase(signal):\n \"\"\" \n Description:\n Compute the phase of the z-scored BPF signal.\n\n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed.\n this ensures that phase values are not limited in range.\n \n Ref: \n Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n Angle [rad] in (-pi,pi]\n \"\"\"\n return np.angle(hilbert(scale(signal), axis=0))\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\ndef function_feature_phofamp(signal):\n \"\"\" \n Description:\n Phase of the signal's amplitude envelope.\n \n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed;\n this ensures that phase values are not limited in range.\n \n Ref: Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n \"\"\"\n BPFfeature = np.abs(hilbert(signal, axis=0))\n BPFfeature = scale(BPFfeature)\n BPFfeature = np.angle(hilbert(BPFfeature, axis=0))\n return BPFfeature\n\n\ndef function_feature_frequency(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\n<assignment token>\n\n\ndef function_comodulogramFeature(signal, fcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']\n ) / 2\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0'])\n NBw = np.size(fcfg['BPFcfg']['Bw'])\n fnyq = fs / 2\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw))\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n signal_local = signal[:, :, ii]\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda : 'Invalid method'\n )\n BPFfeature_local = function_feature(signal_local)\n BPFfeature_local = BPFfeature_local[indSettlingExt - 1:\n BPFfeature_local.shape[0] - (indSettlingExt - 1), :]\n BPFfeature[:, :, ii] = BPFfeature_local\n if fcfg['saveBPFsignal']:\n croppedSignal[:, :, ii] = signal_local[indSettlingExt - 1:\n signal_local.shape[0] - (indSettlingExt - 1), :]\n return BPFfeature, croppedSignal\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\n<assignment token>\n<function token>\n\n\ndef function_feature_phase(signal):\n \"\"\" \n Description:\n Compute the phase of the z-scored BPF signal.\n\n Remark:\n Before the computation of the phase signal, the time series should be\n normalized, de-trended, or mean-subtracted to have the DC-component removed.\n this ensures that phase values are not limited in range.\n \n Ref: \n Assessing transient cross-frequency coupling in EEG data (Cohen 2008).pdf\n\n Angle [rad] in (-pi,pi]\n \"\"\"\n return np.angle(hilbert(scale(signal), axis=0))\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\n<function token>\n\n\ndef function_feature_frequency(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\n<assignment token>\n\n\ndef function_comodulogramFeature(signal, fcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']\n ) / 2\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0'])\n NBw = np.size(fcfg['BPFcfg']['Bw'])\n fnyq = fs / 2\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw))\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n signal_local = signal[:, :, ii]\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda : 'Invalid method'\n )\n BPFfeature_local = function_feature(signal_local)\n BPFfeature_local = BPFfeature_local[indSettlingExt - 1:\n BPFfeature_local.shape[0] - (indSettlingExt - 1), :]\n BPFfeature[:, :, ii] = BPFfeature_local\n if fcfg['saveBPFsignal']:\n croppedSignal[:, :, ii] = signal_local[indSettlingExt - 1:\n signal_local.shape[0] - (indSettlingExt - 1), :]\n return BPFfeature, croppedSignal\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\n<assignment token>\n<function token>\n<function token>\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\n<function token>\n\n\ndef function_feature_frequency(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\n<assignment token>\n\n\ndef function_comodulogramFeature(signal, fcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']\n ) / 2\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0'])\n NBw = np.size(fcfg['BPFcfg']['Bw'])\n fnyq = fs / 2\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw))\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n signal_local = signal[:, :, ii]\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda : 'Invalid method'\n )\n BPFfeature_local = function_feature(signal_local)\n BPFfeature_local = BPFfeature_local[indSettlingExt - 1:\n BPFfeature_local.shape[0] - (indSettlingExt - 1), :]\n BPFfeature[:, :, ii] = BPFfeature_local\n if fcfg['saveBPFsignal']:\n croppedSignal[:, :, ii] = signal_local[indSettlingExt - 1:\n signal_local.shape[0] - (indSettlingExt - 1), :]\n return BPFfeature, croppedSignal\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\n<assignment token>\n<function token>\n<function token>\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\n<function token>\n<function token>\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\n<assignment token>\n\n\ndef function_comodulogramFeature(signal, fcfg, fs, indSettlingExt):\n \"\"\"\n Description:\n In this function we implement the extraction of the phase/amplitude/frequency \n time series from the input signals. The input signals are supposed to be \n previously Band-Pass Filtered signals around the frequency bands of interest.\n\n Inputs:\n - signal. Numeric array (Ns x Nf x NBw)\n Band-Pass Filtered signals. Notation:\n Ns: Number of samples.\n Nf: Number of frequencies. len(fcfg['BPFcfg']['f0'])\n NBw: Number of Bandwidths. len(fcfg['BPFcfg']['Bw'])\n\n - fcfg. Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n - 'start': Numeric value. Start frequency [Hz].\n - 'end': Numeric value. End frequency [Hz].\n - 'res': Numeric value. Frequency resolution [Hz].\n Define the frequency separation between two consecutive BPFs. \n - 'BPFcfg': Structure.\n Band-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n - 'lookAt': String. Parameter of the signal (phase/amplitude/frequency) observed in the range of\n frequency corresponding to the \"x(y)\" axis [none].\n - 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n The LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n - 'saveBPFsignal': Boolean value. Flag to return the Band-Pass Filtered signals.\n *0: Return a NaN.\n *1: Return the filtered signals. \n - 'Nbins': Integer value. \n Number of phase/amplitude/frequency bins used to compute the Histograms (p) of the comodulogram. \n\n - 'sameNumberOfCycles': Boolean value. Flag to configure the processing mode for signal x.\n *0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n *1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n - fs: Numeric value. Sampling rate [Hz].\n - indSettlingExt: Integer value. \n External index for cutting out the transient response of the BPFs.\n If \"indSettlingExt\" is empty or NaN, the index for the longest settling time is used.\n\n Outputs:\n - BPFfeature: Numeric array (Ns x NF x NBw)\n Phase/amplitud/frequency time series for the \"x\" or \"y\" axis of the comodulogram\n\n - croppedSignal: Numeric array (Ns-2*(indSettlingExt-1) x Nf x NBw) \n Cropped Band-Pass Filtered signals (in the case of saveBPFsignal=1)\n \"\"\"\n if 'f1' in fcfg['BPFcfg'].keys() and 'f2' in fcfg['BPFcfg'].keys():\n fcfg['BPFcfg']['f0'] = (fcfg['BPFcfg']['f1'] + fcfg['BPFcfg']['f2']\n ) / 2\n fcfg['BPFcfg']['Bw'] = fcfg['BPFcfg']['f2'] - fcfg['BPFcfg']['f1']\n croppedSignal = []\n Nf = np.size(fcfg['BPFcfg']['f0'])\n NBw = np.size(fcfg['BPFcfg']['Bw'])\n fnyq = fs / 2\n Ns = np.shape(signal)[0]\n Ns_cropped = Ns - 2 * (indSettlingExt - 1)\n BPFfeature = np.zeros((Ns_cropped, Nf, NBw))\n if fcfg['saveBPFsignal']:\n croppedSignal = np.zeros((Ns_cropped, Nf, NBw))\n for ii in range(NBw):\n signal_local = signal[:, :, ii]\n feature = fcfg['lookAt'].lower()\n function_feature = LIST_FEATURES.get(feature, lambda : 'Invalid method'\n )\n BPFfeature_local = function_feature(signal_local)\n BPFfeature_local = BPFfeature_local[indSettlingExt - 1:\n BPFfeature_local.shape[0] - (indSettlingExt - 1), :]\n BPFfeature[:, :, ii] = BPFfeature_local\n if fcfg['saveBPFsignal']:\n croppedSignal[:, :, ii] = signal_local[indSettlingExt - 1:\n signal_local.shape[0] - (indSettlingExt - 1), :]\n return BPFfeature, croppedSignal\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\n<assignment token>\n<function token>\n<function token>\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\n<function token>\n<function token>\n\n\ndef function_feature_phoffreq(signal):\n print('Sin implementar. Devuelve 0')\n return 0\n\n\n<assignment token>\n<function token>\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n\n\ndef function_setCFCcfg(CFCin):\n \"\"\"\n\tDescription:\n\tIn this function we compute the structures for the \"x\" and \"y\" axis of the comodulogram.\n\n\tInputs:\n\t- CFCin: Structure. Parameters of the comodulogram.\n\t\t\t\t- 'fXmin': Numeric value. Minimum frequency for the LF band [Hz].\n\t\t\t\t- 'fXmax': Numeric value. Maximum frequency for the LF band [Hz].\n\t\t\t\t- 'fYmin': Numeric value. Minimum frequency for the HF band [Hz].\n\t\t\t\t- 'fYmax': Numeric value. Maximum frequency for the HF band [Hz].\n\t\t\t\t- 'fXres': Numeric value. Frequency resolution for the LF band [Hz].\n\t\t\t\t- 'fYres': Numeric value. Frequency resolution for the HF band [Hz].\n\t\t\t\t- 'fXlookAt': String. \n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n\t \t frequency corresponding to the \"x\" axis.\n\t\t\t\t- 'fYlookAt': String.\n\t\t\t\t\t\t\t Parameter of the signal observed in the range of\n \t frequency corresponding to the \"y\" axis.\n\t\t\t\t- 'nX': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'nY': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t- 'BPFXcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"x\" axis.\n\t\t\t\t- 'BPFYcfg': Structure. Band-Pass Filter configuration for the comodulogram's \"y\" axis.\n\t\t\t\t- 'LPFcfg': Structure. Low-Pass Filter configuration to smooth the frequency time series.\n \tThe LPF Filter is used to smooth the frequency time series \n\t\t\t\t\t\t\t(fYlookAt = 'FREQUENCY' or 'PHASEofFREQUENCY').\n\t\t\t\t- 'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals.\n 0 - Return a NaN.\n 1 - Return the filtered signals.\n\t\t\t\t- 'Nbins': Int value. \n\t\t\t\t\t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n\t\t\t\t- 'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x:\n 0 - Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod': String. {'plv','mi'}\n\t\t\t\t\t\t\tDefines the approach to compute the Cross frequency Coupling \n\t\t\t\t\t\t\t(PLV / methods to compute the MI).\n\t\t\t\t- 'verbose': Boolean {0,1}. \n\t\t\t\t\t\t\t 0: no message are shown.\n \t 1: show the messages.\n\t\t\t\t- 'perMethod': String. Method by which the surrogated time series are built. Options\n\t\t\t\t\t\t\t* 'trialShuffling'\n\t\t\t\t\t\t\t* 'sampleShuffling'\n \t* 'FFTphaseShuffling'\n \t* 'cutShuffling'\n\t\t\t\t- 'Nper': Int value. Number of permutations.\n\t\t\t\t\t\t It defines the number of surrogate histograms per\n\t\t\t\t\t\t repetition. It is worth noting that in each repetition, \"Nper\" surrogate histograms of size\n\t\t\t\t\t\t \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t- 'Nrep': Int value. Number of repetitions.\n\t\t\t\t\t\t In each repetition a \".mat\" file is written to disk,\n\t\t\t\t\t\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n\t\t\t\t\t\t As a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t- 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t- 'corrMultComp': String {'Bonferroni', 'pixelBased'}.\n\t\t\t\t\t\t\t\t Method to correct for multiple comparisons.\n\t\t\t\t- 'fs': Numeric value.\n\n\tOutputs:\n\t- CFCout: Structure. Parameters of the comodulogram.\n\t\t\t\t-'fXcfg', 'fYcfg': Structure. Parameters of the Frequency Band in \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'start': \tNumeric value. Start frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'end':\tNumeric value. End frequency [Hz].\n\t\t\t\t\t\t\t\t\t- 'res': \tNumeric value. Frequency resolution [Hz].\n\t\t\t\t\t\t\t\t\t\t\t\tDefine the frequency separation between two consecutive BPFs.\n\t\t\t\t\t\t\t\t\t- 'BPFcfg': Structure. \n\t\t\t\t\t\t\t\t\t\t\t\tBand-Pass Filter configuration for the comodulogram's \"x(y)\" axis.\n\t\t\t\t\t\t\t\t\t- 'lookAt': String. Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t\t\t\tfrequency corresponding to the \"x(y)\" axis [none] (string).\n\t\t\t\t\t\t\t\t\t- 'n': Int value. Harmonic number for detection of fbX.n:fbY.n phase locking.\n\t\t\t\t\t\t\t\t\t\t\t\tRef: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\t\t\t\t\t\t\t\t\t- 'LPFcfg' Structure.\n\t\t\t\t\t\t\t\t\t\t\t\tLow-Pass Filter configuration to smooth the frequency time series (structure array).\n \t\tThe LPF Filter is used to smooth the frequency time series (fYlookAt = 'FREQUENCY'\n \t\tor 'PHASEofFREQUENCY').\n\t\t\t\t\t\t\t\t\t- 'saveBPFsignal': Boolean. Flag to return the Band-Pass Filtered signals.\n\t\t\t\t\t\t\t\t\t\t\t\t0: Return a NaN.\n\t\t\t\t\t\t\t\t\t\t\t\t1: Return the filtered signals.\n\t\t\t\t\t\t\t\t\t- 'Nbins': Int value. Number of phase/amplitude bins used to compute the Histograms \n\t\t\t\t\t\t\t\t\t\t\t\t(p) of the comodulogram. \n \t\t- 'sameNumberOfCycles': Boolean. Flag to configure the processing mode for signal x.\n 0: Do not truncate the signal \"x\" to obtain the same number of cycles.\n 1: Process the same number of cycles of signal \"x\" for all \"fX\" frequencies.\n\t\t\t\t- 'CFCmethod'\n\t\t\t\t- 'verbose'\n\t\t\t\t- 'perMethod'\n\t\t\t\t- 'Nper'\n\t\t\t\t- 'Nrep'\n\t\t\t\t- 'Pvalue'\n\t\t\t\t- 'corrMultComp'\n\t\t\t\t- 'fs'\n\t\"\"\"\n fXcfg = {'start': CFCin['fXmin'], 'end': CFCin['fXmax'], 'res': CFCin[\n 'fXres'], 'BPFcfg': CFCin['BPFXcfg'], 'lookAt': CFCin['fXlookAt'],\n 'n': CFCin['nX'], 'Nbins': CFCin['Nbins'], 'sameNumberOfCycles':\n CFCin['sameNumberOfCycles'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n fYcfg = {'start': CFCin['fYmin'], 'end': CFCin['fYmax'], 'res': CFCin[\n 'fYres'], 'BPFcfg': CFCin['BPFYcfg'], 'lookAt': CFCin['fYlookAt'],\n 'n': CFCin['nY'], 'saveBPFsignal': CFCin['saveBPFsignal']}\n if fYcfg['lookAt'].lower == 'frequency' or fYcfg['lookAt'\n ].lower == 'phaseoffrequency':\n fYcfg['LPFcfg'] = CFCin['LPFcfg']\n if CFCin['fXmin'] <= CFCin['BPFXcfg']['Bw'] / 2:\n fXcfg['start'] = CFCin['fXmin'] + CFCin['BPFXcfg']['Bw'] / 2\n fXcfg['BPFcfg']['f0'] = np.linspace(fXcfg['start'], fXcfg['end'], np.\n ceil((fXcfg['end'] - fXcfg['start']) / fXcfg['res']))\n if 'times' in fXcfg['BPFcfg'].keys() and len(fXcfg['BPFcfg']['times']) > 1:\n fXcfg['BPFcfg']['times'] = np.linspace(fXcfg['BPFcfg']['times'][0],\n fXcfg['BPFcfg']['times'][-1], len(fXcfg['BPFcfg']['times']))\n if type(fYcfg['BPFcfg']['Bw'] * 1.0) == float:\n fYcfg['BPFcfg']['Bw'] = fYcfg['BPFcfg']['Bw'] * np.ones(np.shape(\n fXcfg['BPFcfg']['f0']))\n else:\n fYcfg['BPFcfg']['Bw'] = 2 * fXcfg['BPFcfg']['f0']\n if fYcfg['start'] <= fYcfg['BPFcfg']['Bw'][0] / 2:\n fYcfg['start'] = fYcfg['start'] + fYcfg['BPFcfg']['Bw'][0] / 2\n fYcfg['BPFcfg']['f0'] = np.linspace(fYcfg['start'], fYcfg['end'], np.\n ceil((fYcfg['end'] - fYcfg['start']) / fYcfg['res']))\n if 'times' in fYcfg['BPFcfg'].keys() and len(fYcfg['BPFcfg']['times']) > 1:\n fYcfg['BPFcfg']['times'] = np.linspace(fYcfg['BPFcfg']['times'][0],\n fYcfg['BPFcfg']['times'][-1], len(fYcfg['BPFcfg']['times']))\n CFCout = {'fXcfg': fXcfg, 'fYcfg': fYcfg, 'CFCmethod': CFCin[\n 'CFCmethod'], 'verbose': CFCin['verbose'], 'perMethod': CFCin[\n 'perMethod'], 'Nper': CFCin['Nper'], 'Nrep': CFCin['Nrep'],\n 'Pvalue': CFCin['Pvalue'], 'corrMultComp': CFCin['corrMultComp'],\n 'fs': CFCin['fs']}\n return CFCout\n\n\n<assignment token>\n<function token>\n<function token>\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<function token>\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<function token>\n\n\ndef function_PLV(x, y, wx, wy, CFCcfg):\n \"\"\"\n\tDescription: \n\tIn this function we compute the Phase Locking Values.\n\n\tRefs:\n\t[1] /PhaseLockingValue/function_PhaseLockingValue_v1.m\n\t[2] Measuring Phase-Amplitude Coupling Between Neuronal Oscillations (Tort, 2010).pdf, p. 1198\n\t[3] High gamma power is phase-locked to theta oscillations (Canolty, 2006).pdf\n\t[4] Phase Locking from Noisy Data (Tass, 1998).pdf\n Ref: Detection of n,m Phase Locking from Noisy Data (Tass, 1998).pdf \n\n\tInputs:\n\t\t- x: Numeric array (Nsamples x NfX).\n\t\t \t Data for the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- y: Numeric array (Nsamples x NfY x NfX). \n\t\t Data for the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- wx: Numeric array (Nsamples x NfX). \n\t\t\t Weights related to the comodulogram's \"x\" axis (matrix: samples x NfX).\n\t\t- wy: Numeric array (Nsamples x NfY x NfX).\n\t\t Weights related to the comodulogram's \"y\" axis (matrix: samples x NfY x NfX).\n\t\t- CFCcfc: structure. \n\t\t\t\t Parameters of the comodulogram (structure array)\n\t\t\t\t - 'fXcfg': structure.\n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"x\" axis.\n\t\t\t\t - 'fYcfg': structure. \n\t\t\t\t \t\t\t Parameters of the Frequency Band in \"y\" axis.\n\t\t\t\t \t\t\t-'start': Numeric value. Start frequency [Hz].\n\t\t\t\t \t\t\t-'end': Numeric value. End frequency [Hz].\n\t\t\t\t \t\t\t-'res': Numeric value. Frequency resolution.\n\t\t\t\t \t\t\t\t\t Define the frequency separation between two consecutive BPFs.\n\t\t\t\t \t\t\t-'BPFcfg': Structure. Band-Pass Filter configuration for the comodulogram's axis.\n\t\t\t\t\t\t\t-'lookAt': String.\n\t\t\t\t\t\t\t\t\t Parameter of the signal (phase/amplitude) observed in the range of\n\t\t\t\t\t\t\t\t\t frequency [none] (string).\n\t\t\t\t\t\t\t-'n': Int value. Harmonic number for detection of phase locking.\n\t\t\t\t\t\t\t-'saveBPFsignal': {0,1}. Flag to return the Band-Pass Filtered signals. \n\t\t\t\t\t\t\t\t\t\t\t 0 - Return a NaN.\n 1 - Return the filtered signals.\n -'Nbins': Int value.\n \t\t Number of phase/amplitude bins used to compute the Histograms (p) of the comodulogram. \n -'sameNumberOfCycles': {0,1}. Flag to configure the processing mode for signal x.\n \t\t\t\t\t 0 - Do not truncate the signal \"x\" to obtain the same number \n \t\t\t\t\t \t of cycles.\n 1 - Process the same number of cycles of signal \"x\" for all \n\t\t\t\t\t\t\t\t\t\t\t\t\t \"fX\" frequencies.\n\t\t\t\t - 'CFCmethod': String.\n\t\t\t\t \t\t\t\t Defines the approach to compute the Cross frequency Coupling. E.g: 'plv'.\n\t\t\t\t - 'verbose': Boolean. Display flag. \n\t\t\t\t - 'perMethod': {'trialShuffling', 'sampleShuffling', 'FFTphaseShuffling', 'cutShuffling'}. \n\t\t\t\t \t\t\t\t Method by which the surrogated time series are built.\n\t\t\t\t - 'Nper': Int value. \n\t\t\t\t \t\t Number of permutations. It defines the number of surrogate histograms per\n repetition. It is worth noting that in each repetition, \"Nper\" surrogate \n\t\t\t\t\t\t\thistograms of size \"Nbins x NfY x NfX\" are stored in memory (RAM).\n\t\t\t\t - 'Nrep': Int value.\n\t\t\t\t Number of repetitions. In each repetition a \".mat\" file is written to disk,\n\t containing \"Nper\" surrogate histograms of size \"Nbins x NfY x NfX\". \n \tAs a consequence, the final number of surrogate histograms is \"Nper x Nrep\".\n\t\t\t\t - 'Pvalue': Numeric value. P-value for the statistically significant level.\n\t\t\t\t - 'corrMultComp': {'Bonferroni', 'pixelBased'} Method to correct for multiple comparisons.\n\t\t\t\t - 'fs': Numeric value. Sampling rate [Hz].\n\n\tOutputs:\n\t\t- PLV: Numeric array (NfY x NfX).\n\t\t\t Phase Locking Value.\n\t\t- wxPLV: Numeric array (NfY x NfX).\n\t\t\t\t Weighted Phase Locking Values using the wx weights (matrix: NfY x NfX).\n\t\t- wyPLV: Numeric array (NfY x NfX). \n\t\t\t\t Weighted Phase Locking Values using the wy weights (matrix: NfY x NfX).\n\n\t \t\t\t NfX = length(CFCcfg['fXcfg']['BPFcfg']['f0'])\n\t \t\t\t NfY = length(CFCcfg['fYcfg']['BPFcfg']['f0'])\n\t\"\"\"\n wxPLV = []\n wyPLV = []\n NfX = np.size(CFCcfg['fXcfg']['BPFcfg']['f0'])\n NfY = np.size(CFCcfg['fYcfg']['BPFcfg']['f0'])\n Ns = np.shape(x)[0]\n nX = CFCcfg['fXcfg']['n']\n nY = CFCcfg['fYcfg']['n']\n PLV = np.zeros((NfY, NfX), dtype=complex)\n for ii in range(NfY):\n PLV[ii, :] = np.sum(np.exp(1.0j * (nX * x - nY * y[:, ii, :])), 0) / Ns\n return PLV, wxPLV, wyPLV\n",
"<docstring token>\n<import token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n\n\ndef function_feature_amplitude(signal):\n \"\"\" \n Description:\n Compute the amplitude (signal envelope).\n Amplitude envelope of the signal (AM demodulation).\n\n \"\"\"\n return np.abs(hilbert(signal, axis=0))\n\n\n<function token>\n<function token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n",
"<docstring token>\n<import token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<function token>\n<assignment token>\n<function token>\n<function token>\n"
] | false |
99,098 |
8c79017e2a3d1ef3fd85df694b34e026a7e73062
|
print("hi niggles, how are you!!!")
|
[
"print(\"hi niggles, how are you!!!\")\n",
"print('hi niggles, how are you!!!')\n",
"<code token>\n"
] | false |
99,099 |
d4c71f582457a5ec03993d95cccd082e51cd04f0
|
from django.conf.urls import *
from django.contrib.auth.forms import AuthenticationForm
from django.views.generic import RedirectView
urlpatterns = patterns('',
(r'^$', RedirectView.as_view(url='/products/')),
(r'^products/', include('products.urls')),
(r'^services/', include('services.urls')),
(r'^accounts/', include('accounts.urls')),
(r'^api/', include('api.urls')),
)
|
[
"from django.conf.urls import *\nfrom django.contrib.auth.forms import AuthenticationForm\nfrom django.views.generic import RedirectView\n\nurlpatterns = patterns('',\n (r'^$', RedirectView.as_view(url='/products/')),\n (r'^products/', include('products.urls')),\n (r'^services/', include('services.urls')),\n (r'^accounts/', include('accounts.urls')),\n (r'^api/', include('api.urls')),\n)",
"from django.conf.urls import *\nfrom django.contrib.auth.forms import AuthenticationForm\nfrom django.views.generic import RedirectView\nurlpatterns = patterns('', ('^$', RedirectView.as_view(url='/products/')),\n ('^products/', include('products.urls')), ('^services/', include(\n 'services.urls')), ('^accounts/', include('accounts.urls')), ('^api/',\n include('api.urls')))\n",
"<import token>\nurlpatterns = patterns('', ('^$', RedirectView.as_view(url='/products/')),\n ('^products/', include('products.urls')), ('^services/', include(\n 'services.urls')), ('^accounts/', include('accounts.urls')), ('^api/',\n include('api.urls')))\n",
"<import token>\n<assignment token>\n"
] | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.