code (string, 13 to 1.2M chars) | order_type (string, 1 class: "normal") | original_example (dict) | step_ids (list, 1 to 5 items)
---|---|---|---|
import typing
import torch.nn as nn
from .torch_utils import get_activation, BatchNorm1d
from dna.models.torch_modules.torch_utils import PyTorchRandomStateContext
class Submodule(nn.Module):
def __init__(self, layer_sizes: typing.List[int], activation_name: str,
use_batch_norm: bool, use_skip: bool=False, dropout: float=0.0, *,
device: str='cuda:0', seed: int=0):
        super().__init__()
        # seed parameter initialization so construction is reproducible
        with PyTorchRandomStateContext(seed):
n_layers = len(layer_sizes) - 1
activation = get_activation(activation_name)
layers = []
for i in range(n_layers):
if i > 0:
layers.append(activation())
if dropout > 0.0:
layers.append(nn.Dropout(p=dropout))
if use_batch_norm:
layers.append(BatchNorm1d(layer_sizes[i]))
layers.append(nn.Linear(layer_sizes[i], layer_sizes[i + 1]))
self.net = nn.Sequential(*layers)
self.net.to(device=device)
        if use_skip:
            # identity skip when input and output widths match, otherwise a linear projection
            if layer_sizes[0] == layer_sizes[-1]:
self.skip = nn.Sequential()
else:
self.skip = nn.Linear(layer_sizes[0], layer_sizes[-1])
self.skip.to(device=device)
else:
self.skip = None
def forward(self, x):
if self.skip is None:
return self.net(x)
else:
return self.net(x) + self.skip(x)
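A minimal usage sketch (hypothetical: it assumes the dna package's torch_utils helpers are importable and that 'relu' is a valid activation name; the sizes, device, and seed are illustrative):
import torch
# 16 -> 32 -> 16 MLP; equal end widths make the skip connection an identity
block = Submodule(layer_sizes=[16, 32, 16], activation_name='relu',
                  use_batch_norm=True, use_skip=True, dropout=0.1,
                  device='cpu', seed=42)
x = torch.randn(8, 16)  # batch of 8 input vectors
y = block(x)            # net(x) + skip(x); shape (8, 16)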
|
normal
|
{
"blob_id": "950b2906853c37cdeaa8ed1076fff79dbe99b6f8",
"index": 8327,
"step-1": "<mask token>\n\n\nclass Submodule(nn.Module):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Submodule(nn.Module):\n\n def __init__(self, layer_sizes: typing.List[int], activation_name: str,\n use_batch_norm: bool, use_skip: bool=False, dropout: float=0.0, *,\n device: str='cuda:0', seed: int=0):\n super().__init__()\n with PyTorchRandomStateContext(seed):\n n_layers = len(layer_sizes) - 1\n activation = get_activation(activation_name)\n layers = []\n for i in range(n_layers):\n if i > 0:\n layers.append(activation())\n if dropout > 0.0:\n layers.append(nn.Dropout(p=dropout))\n if use_batch_norm:\n layers.append(BatchNorm1d(layer_sizes[i]))\n layers.append(nn.Linear(layer_sizes[i], layer_sizes[i + 1]))\n self.net = nn.Sequential(*layers)\n self.net.to(device=device)\n if use_skip:\n if layer_sizes[0] == layer_sizes[-1]:\n self.skip = nn.Sequential()\n else:\n self.skip = nn.Linear(layer_sizes[0], layer_sizes[-1])\n self.skip.to(device=device)\n else:\n self.skip = None\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Submodule(nn.Module):\n\n def __init__(self, layer_sizes: typing.List[int], activation_name: str,\n use_batch_norm: bool, use_skip: bool=False, dropout: float=0.0, *,\n device: str='cuda:0', seed: int=0):\n super().__init__()\n with PyTorchRandomStateContext(seed):\n n_layers = len(layer_sizes) - 1\n activation = get_activation(activation_name)\n layers = []\n for i in range(n_layers):\n if i > 0:\n layers.append(activation())\n if dropout > 0.0:\n layers.append(nn.Dropout(p=dropout))\n if use_batch_norm:\n layers.append(BatchNorm1d(layer_sizes[i]))\n layers.append(nn.Linear(layer_sizes[i], layer_sizes[i + 1]))\n self.net = nn.Sequential(*layers)\n self.net.to(device=device)\n if use_skip:\n if layer_sizes[0] == layer_sizes[-1]:\n self.skip = nn.Sequential()\n else:\n self.skip = nn.Linear(layer_sizes[0], layer_sizes[-1])\n self.skip.to(device=device)\n else:\n self.skip = None\n\n def forward(self, x):\n if self.skip is None:\n return self.net(x)\n else:\n return self.net(x) + self.skip(x)\n",
"step-4": "import typing\nimport torch.nn as nn\nfrom .torch_utils import get_activation, BatchNorm1d\nfrom dna.models.torch_modules.torch_utils import PyTorchRandomStateContext\n\n\nclass Submodule(nn.Module):\n\n def __init__(self, layer_sizes: typing.List[int], activation_name: str,\n use_batch_norm: bool, use_skip: bool=False, dropout: float=0.0, *,\n device: str='cuda:0', seed: int=0):\n super().__init__()\n with PyTorchRandomStateContext(seed):\n n_layers = len(layer_sizes) - 1\n activation = get_activation(activation_name)\n layers = []\n for i in range(n_layers):\n if i > 0:\n layers.append(activation())\n if dropout > 0.0:\n layers.append(nn.Dropout(p=dropout))\n if use_batch_norm:\n layers.append(BatchNorm1d(layer_sizes[i]))\n layers.append(nn.Linear(layer_sizes[i], layer_sizes[i + 1]))\n self.net = nn.Sequential(*layers)\n self.net.to(device=device)\n if use_skip:\n if layer_sizes[0] == layer_sizes[-1]:\n self.skip = nn.Sequential()\n else:\n self.skip = nn.Linear(layer_sizes[0], layer_sizes[-1])\n self.skip.to(device=device)\n else:\n self.skip = None\n\n def forward(self, x):\n if self.skip is None:\n return self.net(x)\n else:\n return self.net(x) + self.skip(x)\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
import os
import sys
import pandas as pd
import pickle as pkl
from src.utils import image as im
if __name__ == '__main__':
pickled = True
create_sets = True
normed = False
    if len(sys.argv) > 1:  # a filename was passed on the command line
filename = sys.argv[1]
else:
filename = os.path.join(os.path.pardir, os.path.pardir, 'data',
'final_transp_directpkl.pkl')
if os.path.splitext(filename)[1] == '.txt':
        iter_csv = pd.read_csv(filename, sep='\t', index_col=0, chunksize=20000)
        df = pd.concat([chunk for chunk in iter_csv])
    else:
        # use a context manager so the file handle is closed promptly
        with open(filename, 'rb') as f:
            df = pkl.load(f)
fig = im.plot_genes(df.sample(1000))
fig.savefig(os.path.splitext(filename)[0] + '.png')
|
normal
|
{
"blob_id": "18a17c7326a6ae96f74c843d1a902074b377a6d2",
"index": 2701,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n pickled = True\n create_sets = True\n normed = False\n if len(sys.argv) > 2:\n filename = sys.argv[1]\n else:\n filename = os.path.join(os.path.pardir, os.path.pardir, 'data',\n 'final_transp_directpkl.pkl')\n if os.path.splitext(filename)[1] == '.txt':\n iter_csv = pd.read_csv(filename, sep='\\t', index_col=0, chunksize=20000\n )\n df = pd.concat([chunk for chunk in iter_csv])\n else:\n df = pkl.load(open(filename, 'rb'))\n fig = im.plot_genes(df.sample(1000))\n fig.savefig(os.path.splitext(filename)[0] + '.png')\n",
"step-3": "import os\nimport sys\nimport pandas as pd\nimport pickle as pkl\nfrom src.utils import image as im\nif __name__ == '__main__':\n pickled = True\n create_sets = True\n normed = False\n if len(sys.argv) > 2:\n filename = sys.argv[1]\n else:\n filename = os.path.join(os.path.pardir, os.path.pardir, 'data',\n 'final_transp_directpkl.pkl')\n if os.path.splitext(filename)[1] == '.txt':\n iter_csv = pd.read_csv(filename, sep='\\t', index_col=0, chunksize=20000\n )\n df = pd.concat([chunk for chunk in iter_csv])\n else:\n df = pkl.load(open(filename, 'rb'))\n fig = im.plot_genes(df.sample(1000))\n fig.savefig(os.path.splitext(filename)[0] + '.png')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from django.db import connection
from .models import Order
from .models import Package
from .models import DeliveryStatus
from .models import CalcParameters
class DataService:
def __init__(self):
pass
@staticmethod
def get_all_orders():
orders = Order.objects.order_by('-order_date')
# create new variables for display
for o in orders:
o.package_names = ', '.join([p.name for p in list(o.packages.all())])
o.delivery_date = o.deliveryinfo_set.get().delivery_date
o.delivery_charge = o.deliveryinfo_set.get().charge
return orders
@staticmethod
def get_all_packages():
return Package.objects.all()
@staticmethod
def get_shopping_list_details(order_ids, dish_ids=None):
"""
:param order_ids: a list of order ids as int or str. Or a single order id as int or str
:param dish_ids: Restrict shopping list to these dishes.
A list of dish ids as int or str. Or a single order id as int or str.
:return: Return shopping list for the given orders
"""
if isinstance(order_ids, str):
order_ids = [int(order_ids)]
if isinstance(order_ids, int):
order_ids = [order_ids]
if not isinstance(order_ids, list):
raise Exception('Expecting a single order id or a list of order ids. Got [{ids}]'.format(ids=order_ids))
SQL = """select
d.id dish_id,
d.name dish_name,
sum(op.package_qty) dish_qty,
sum(d.portion_count) portion_count,
i.name ingredient_name,
round(sum(di.ingredient_weight * op.package_qty), 2) total_ingredient_weight,
round(sum(di.ingredient_weight * (i.cost_price/i.measure) * op.package_qty), 2) total_cost_price
from
orders o, order_package op, package_dish pd, dish d, dish_ingredient di, ingredient i
where
o.id = op.order_id and
op.package_id = pd.package_id and
pd.dish_id = d.id and
d.id = di.dish_id and
di.ingredient_id = i.id and
o.id in ({ids})
group by d.id, d.name, i.name
order by d.name, i.name""".format(ids=','.join([str(x) for x in order_ids]))
with connection.cursor() as cursor:
cursor.execute(SQL)
rows = cursor.fetchall()
        # return a list of tuples rather than a tuple of tuples
        return list(rows)
class StaticDataDao(type):
@property
def delivery_statuses(cls):
if getattr(cls, '_delivery_statuses', None) is None:
cls._delivery_statuses = list(DeliveryStatus.objects.all())
return cls._delivery_statuses
@property
def calc_parameters(cls):
if getattr(cls, '_calc_parameters', None) is None:
m = {}
for p in list(CalcParameters.objects.all()):
m[p.name] = p.value
cls._calc_parameters = m
return cls._calc_parameters
# Python 3 ignores the old `__metaclass__` attribute; pass the metaclass explicitly
class StaticDataService(object, metaclass=StaticDataDao):
    pass
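A minimal usage sketch (the order ids are hypothetical; it assumes the models above are migrated and the tables populated):
# combined shopping list for two orders; each row matches the SQL select list
rows = DataService.get_shopping_list_details([101, 102])
for dish_id, dish_name, qty, portions, ingredient, weight, cost in rows:
    print(dish_name, ingredient, weight, cost)
# the metaclass properties are read off the class itself and memoized after the first query
statuses = StaticDataService.delivery_statuses
params = StaticDataService.calc_parameters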
|
normal
|
{
"blob_id": "2e66a31638eb4e619f14a29d5d3847482d207003",
"index": 3996,
"step-1": "<mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-2": "<mask token>\n\n\nclass DataService:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-3": "<mask token>\n\n\nclass DataService:\n\n def __init__(self):\n pass\n <mask token>\n <mask token>\n <mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-4": "<mask token>\n\n\nclass DataService:\n\n def __init__(self):\n pass\n\n @staticmethod\n def get_all_orders():\n orders = Order.objects.order_by('-order_date')\n for o in orders:\n o.package_names = ', '.join([p.name for p in list(o.packages.\n all())])\n o.delivery_date = o.deliveryinfo_set.get().delivery_date\n o.delivery_charge = o.deliveryinfo_set.get().charge\n return orders\n <mask token>\n <mask token>\n\n\nclass StaticDataDao(type):\n\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-5": "from django.db import connection\n\nfrom .models import Order\nfrom .models import Package\nfrom .models import DeliveryStatus\nfrom .models import CalcParameters\n\n\nclass DataService:\n def __init__(self):\n pass\n\n @staticmethod\n def get_all_orders():\n orders = Order.objects.order_by('-order_date')\n # create new variables for display\n for o in orders:\n o.package_names = ', '.join([p.name for p in list(o.packages.all())])\n o.delivery_date = o.deliveryinfo_set.get().delivery_date\n o.delivery_charge = o.deliveryinfo_set.get().charge\n return orders\n\n @staticmethod\n def get_all_packages():\n return Package.objects.all()\n\n @staticmethod\n def get_shopping_list_details(order_ids, dish_ids=None):\n \"\"\"\n :param order_ids: a list of order ids as int or str. Or a single order id as int or str\n :param dish_ids: Restrict shopping list to these dishes.\n A list of dish ids as int or str. Or a single order id as int or str.\n :return: Return shopping list for the given orders\n \"\"\"\n if isinstance(order_ids, str):\n order_ids = [int(order_ids)]\n if isinstance(order_ids, int):\n order_ids = [order_ids]\n if not isinstance(order_ids, list):\n raise Exception('Expecting a single order id or a list of order ids. Got [{ids}]'.format(ids=order_ids))\n\n SQL = \"\"\"select\n d.id dish_id,\n d.name dish_name,\n sum(op.package_qty) dish_qty,\n sum(d.portion_count) portion_count,\n i.name ingredient_name,\n round(sum(di.ingredient_weight * op.package_qty), 2) total_ingredient_weight,\n round(sum(di.ingredient_weight * (i.cost_price/i.measure) * op.package_qty), 2) total_cost_price\n from\n orders o, order_package op, package_dish pd, dish d, dish_ingredient di, ingredient i\n where\n o.id = op.order_id and\n op.package_id = pd.package_id and\n pd.dish_id = d.id and\n d.id = di.dish_id and\n di.ingredient_id = i.id and\n o.id in ({ids})\n group by d.id,\td.name, i.name\n order by d.name, i.name\"\"\".format(ids=','.join([str(x) for x in order_ids]))\n\n with connection.cursor() as cursor:\n cursor.execute(SQL)\n rows = cursor.fetchall()\n\n # return a list of tuples rather than a tuple of tuples\n return [row for row in rows]\n\n\nclass StaticDataDao(type):\n @property\n def delivery_statuses(cls):\n if getattr(cls, '_delivery_statuses', None) is None:\n cls._delivery_statuses = list(DeliveryStatus.objects.all())\n return cls._delivery_statuses\n\n @property\n def calc_parameters(cls):\n if getattr(cls, '_calc_parameters', None) is None:\n m = {}\n for p in list(CalcParameters.objects.all()):\n m[p.name] = p.value\n cls._calc_parameters = m\n return cls._calc_parameters\n\n\nclass StaticDataService(object):\n __metaclass__ = StaticDataDao\n",
"step-ids": [
5,
6,
7,
8,
12
]
}
|
[
5,
6,
7,
8,
12
] |
import webbrowser
import time
total = 3
count = 0
while count < total:
webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')
    time.sleep(5 * 60 * 60)  # wait 5 hours between opens
count += 1
|
normal
|
{
"blob_id": "e11a04cad967ae377449aab8b12bfde23e403335",
"index": 8391,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile count < total:\n webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')\n time.sleep(5 * 60 * 60)\n count += 1\n",
"step-3": "<mask token>\ntotal = 3\ncount = 0\nwhile count < total:\n webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')\n time.sleep(5 * 60 * 60)\n count += 1\n",
"step-4": "import webbrowser\nimport time\ntotal = 3\ncount = 0\nwhile count < total:\n webbrowser.open('https://www.youtube.com/watch?v=GoSBNNgf_Vc')\n time.sleep(5 * 60 * 60)\n count += 1\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy
class Window:
    # constructor
    def __init__(self, world, xyw_min=None, xyw_max=None):
        self.world = world
        # case where no bounds are given: fall back to defaults
        if xyw_min is None or xyw_max is None:
            self.xyw_min = (-100, -100)
            self.xyw_max = (100, 100)
        # case where bounds are given: validate them
        else:
            if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:
                raise Exception('The xyw_min param must be a tuple of 2 values.')
            try:
                self.xyw_min = (float(xyw_min[0]), float(xyw_min[1]))
            except Exception:
                raise Exception('The xyw_min coordinates must be pairs of numbers.')
            if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:
                raise Exception('The xyw_max param must be a tuple of 2 values.')
            try:
                self.xyw_max = (float(xyw_max[0]), float(xyw_max[1]))
            except Exception:
                raise Exception('The xyw_max coordinates must be pairs of numbers.')
        self.xyw_1 = self.xyw_min
        self.xyw_2 = (self.xyw_max[0], self.xyw_min[1])
        self.xyw_3 = (self.xyw_min[0], self.xyw_max[1])
        self.xyw_4 = self.xyw_max
        # original window center (can be used to bring the view back to where it started)
        self.center = self.calcCenter()
        # current center (used in later calculations involving the window center)
        self.newCenter = self.center
        self.fatorMovimento = 10
        self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        # initialize the window's scn
        self.degrees = 0
        self.scn()
def set_xyw_min(self, xmin, ymin):
self.xyw_min = (xmin, ymin)
def set_xyw_max(self, xmax, ymax):
self.xyw_max = (xmax, ymax)
    # returns the (x, y) coordinates of the window center
def calcCenter(self) -> (float, float):
return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] + self.xyw_max[1]) / 2
    # returns the coordinates of the window's bottom-left and top-right corners
def getCoords(self) -> (float, float, float, float):
return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[1]
    # returns the window's width and height
def getWindowDimensions(self) -> (float, float):
xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])
xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])
xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])
return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)
    # translates the window up, from the user's point of view
def moveUp(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * sin, dy=self.fatorMovimento * cos)
    # translates the window down, from the user's point of view
def moveDown(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=(-1) * self.fatorMovimento * sin, dy=(-1) * self.fatorMovimento * cos)
    # translates the window right, from the user's point of view
def moveRight(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=(-1) * self.fatorMovimento * cos, dy=(-1) * self.fatorMovimento * sin)
    # translates the window left, from the user's point of view
def moveLeft(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * cos, dy=self.fatorMovimento * sin)
    # performs the window translation
    def _translate(self, dx=0, dy=0):
        # stack the window corners as homogeneous coordinates; the translation below works for any dx and dy
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],
[self.xyw_2[0], self.xyw_2[1], 1],
[self.xyw_3[0], self.xyw_3[1], 1],
[self.xyw_4[0], self.xyw_4[1], 1]])
        # apply the translation
        translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])
        # update the window corners
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, translate_matrix)
self.xyw_1 = (xyw_1[0], xyw_1[1])
self.xyw_2 = (xyw_2[0], xyw_2[1])
self.xyw_3 = (xyw_3[0], xyw_3[1])
self.xyw_4 = (xyw_4[0], xyw_4[1])
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
        # update the center
        self.newCenter = self.calcCenter()
        # update the scn
        self.scn()
    # Shrinks the window (zoom in)
def zoomIn(self):
self._scale(scale=0.9)
self.fatorMovimento = self.fatorMovimento * 0.9
    # Enlarges the window (zoom out)
def zoomOut(self):
self._scale(scale=1.1)
self.fatorMovimento = self.fatorMovimento * 1.1
    # Scales the window
def _scale(self, scale=1):
        # object center
        cx, cy = self.newCenter
        # world coordinates of the window corners
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],
[self.xyw_2[0], self.xyw_2[1], 1],
[self.xyw_3[0], self.xyw_3[1], 1],
[self.xyw_4[0], self.xyw_4[1], 1]])
        # shift so the object center sits at the world origin
        translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])
        # apply the scaling
        scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])
        # undo the shift of the object center
        translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
        # compose a single matrix applying all the transformations
        transformations = numpy.matmul(translate_matrix_1, scale_matrix)
        transformations = numpy.matmul(transformations, translate_matrix_2)
        # apply the transformations
        xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)
        # update xyw_min/max
self.xyw_1 = (xyw_1[0], xyw_1[1])
self.xyw_2 = (xyw_2[0], xyw_2[1])
self.xyw_3 = (xyw_3[0], xyw_3[1])
self.xyw_4 = (xyw_4[0], xyw_4[1])
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
        # update the center
        self.newCenter = self.calcCenter()
        # update the scn
        self.scn()
    # Rotates the window clockwise
    def rotateRight(self, angle):
        # e.g. 360 - 10 = 350
        self._rotate(360 - angle)
    # Rotates the window counterclockwise
    def rotateLeft(self, angle):
        self._rotate(angle)
    # Rotates the window about its own center
def _rotate(self, angle=0):
self.degrees = (self.degrees + angle) % 360
        # object center
        cx, cy = self.newCenter
        # world coordinates of the window corners
        window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],
                                     [self.xyw_2[0], self.xyw_2[1], 1],
                                     [self.xyw_3[0], self.xyw_3[1], 1],
                                     [self.xyw_4[0], self.xyw_4[1], 1]])
        # shift so the object center sits at the world origin
        translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])
        # build the rotation
        radians = numpy.radians(angle)
        sin = numpy.sin(radians)
        cos = numpy.cos(radians)
        rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])
        # undo the shift
        translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
        # compose the rotation transformation matrix
        transformations = numpy.matmul(translate_matrix_1, rotate_matrix)
        transformations = numpy.matmul(transformations, translate_matrix_2)
        # apply the transformations
        xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)
        # update xyw_min/max
self.xyw_1 = (xyw_1[0], xyw_1[1])
self.xyw_2 = (xyw_2[0], xyw_2[1])
self.xyw_3 = (xyw_3[0], xyw_3[1])
self.xyw_4 = (xyw_4[0], xyw_4[1])
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
        # update the center
        self.newCenter = self.calcCenter()
        # update the scn
        self.scn()
    # Computes the window's coordinate-system (SCN) transformation matrix
    def scn(self):
        # object center
        cx, cy = self.newCenter
        # shift so the object center sits at the world origin
        translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])
        # take the INVERSE of the window's current rotation
        radians = numpy.radians((-1) * self.degrees)
        sin = numpy.sin(radians)
        cos = numpy.cos(radians)
        # rotate
        rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])
        length, height = self.getWindowDimensions()
        sx = 1 / (length / 2)
        sy = 1 / (height / 2)
        # scale so the window spans [-1, 1] on each axis
        scale_matrix = numpy.array([[sx, 0, 0], [0, sy, 0], [0, 0, 1]])
        # build the window's SCN conversion matrix
        scn = numpy.matmul(translate_matrix_1, rotate_matrix)
        self.window_scn = numpy.matmul(scn, scale_matrix)
    # Applies the window's coordinate-system transformation matrix to an arbitrary point
def applySCN(self, x, y):
point_coords = numpy.array([x, y, 1])
final_coords = numpy.matmul(point_coords, self.window_scn)
return final_coords[0], final_coords[1]
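A minimal usage sketch (values are illustrative; world is whatever scene object the caller owns):
window = Window(world=None)              # default bounds (-100, -100) to (100, 100)
window.zoomIn()                          # shrink the window by 10%
window.rotateLeft(45)                    # rotate the view 45 degrees counterclockwise
window.moveUp()                          # pan up relative to the rotated view
x_scn, y_scn = window.applySCN(10, 20)   # map a world point into window coordinates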
|
normal
|
{
"blob_id": "deb0cd745eae97a6dbabdfab37e1c6d75e5372f0",
"index": 8422,
"step-1": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n <mask token>\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n <mask token>\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n <mask token>\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n <mask 
token>\n <mask token>\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-2": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n <mask token>\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n <mask token>\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n\n def _translate(self, dx=0, dy=0):\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n translate_matrix)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 
1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n <mask token>\n <mask token>\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-3": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n\n def calcCenter(self) ->(float, float):\n return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] +\n self.xyw_max[1]) / 2\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n\n def getWindowDimensions(self) ->(float, float):\n xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])\n xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])\n xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])\n return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n\n def _translate(self, dx=0, dy=0):\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n translate_matrix)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords 
= numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n <mask token>\n <mask token>\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-4": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n\n def calcCenter(self) ->(float, float):\n return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] +\n self.xyw_max[1]) / 2\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n\n def getWindowDimensions(self) ->(float, float):\n xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])\n xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])\n xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])\n return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n\n def _translate(self, dx=0, dy=0):\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n translate_matrix)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords 
= numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n\n def _rotate(self, angle=0):\n self.degrees = (self.degrees + angle) % 360\n cx, cy = self.newCenter\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n radians = numpy.radians(angle)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, rotate_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def scn(self):\n cx, cy = self.newCenter\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n radians = numpy.radians(-1 * self.degrees)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n length, height = self.getWindowDimensions()\n sx = 1 / (length / 2)\n sy = 1 / (height / 2)\n scale_matrix = numpy.array([[sx, 0, 0], [0, sy, 0], [0, 0, 1]])\n scn = numpy.matmul(translate_matrix_1, rotate_matrix)\n self.window_scn = numpy.matmul(scn, scale_matrix)\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-5": "import numpy\nfrom math import cos, sin, radians, tan\n\nclass Window:\n # construtor\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n # caso em q é None\n if xyw_min is None or xyw_max is None:\n self.xyw_min = (-100, -100)\n self.xyw_max = (100, 100)\n # caso em q n é None\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception('O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = (float(xyw_min[0]), float(xyw_min[1]))\n except Exception:\n raise Exception('As coordenadas xyw_min devem ser pares de números.')\n\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception('O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = (float(xyw_max[0]), float(xyw_max[1]))\n except Exception:\n raise Exception('As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = (self.xyw_max[0], self.xyw_min[1])\n self.xyw_3 = (self.xyw_min[0], self.xyw_max[1])\n self.xyw_4 = self.xyw_max\n # define o centro original da window(attr q pode ser usado para trazer a view de volta ao seu centro original)\n self.center = self.calcCenter()\n # define o novo centro(var que pode ser utilizada em futuros calculos envolvendo o centro da window)\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n # inicializa scn da window\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = (xmin, ymin)\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = (xmax, ymax)\n\n # retorna as coordenadas (x,y) do centro da window\n def calcCenter(self) -> (float, float):\n return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] + self.xyw_max[1]) / 2\n\n # retorna as coordenadas do canto inferior esquerdo e canto superior direito da window\n def getCoords(self) -> (float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[1]\n\n # retorna a largura e profundidade da window\n def getWindowDimensions(self) -> (float, float):\n xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])\n xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])\n xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])\n return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)\n\n # translada a window para cima, do ponto de vista do usuario\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.fatorMovimento * cos)\n\n # translada a window para baixo, do ponto de vista do usuario\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=(-1) * self.fatorMovimento * sin, dy=(-1) * self.fatorMovimento * cos)\n\n # translada a window para direita, do ponto de vista do usuario\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=(-1) * self.fatorMovimento * cos, dy=(-1) * self.fatorMovimento * sin)\n\n # translada a window para esquerda, do ponto de vista do usuario\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.fatorMovimento * sin)\n\n # realiza a translaçao da window\n def _translate(self, dx=0, dy=0):\n # cria a 
matriz de translacao do obj para um dx e dy qualquer\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],\n [self.xyw_2[0], self.xyw_2[1], 1],\n [self.xyw_3[0], self.xyw_3[1], 1],\n [self.xyw_4[0], self.xyw_4[1], 1]])\n # realiza a translacao\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n # atualiza a window\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, translate_matrix)\n self.xyw_1 = (xyw_1[0], xyw_1[1])\n self.xyw_2 = (xyw_2[0], xyw_2[1])\n self.xyw_3 = (xyw_3[0], xyw_3[1])\n self.xyw_4 = (xyw_4[0], xyw_4[1])\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n # atualiza o centro\n self.newCenter = self.calcCenter()\n # atualiza scn\n self.scn()\n\n # Encolhe a window\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n # Aumenta a window\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n # Escalona a window\n def _scale(self, scale=1):\n # centro do obj\n cx, cy = self.newCenter\n # coords do mundo\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],\n [self.xyw_2[0], self.xyw_2[1], 1],\n [self.xyw_3[0], self.xyw_3[1], 1],\n [self.xyw_4[0], self.xyw_4[1], 1]])\n # ajusta o centro do mundo com o obj\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])\n # realiza o escalonamento(num sei se esse e o termo correto)\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n # reverte o ajuste do centro do mundo com o obj\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n # monta uma matriz que aplica todas as transformacoes\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n # aplica as transformacoes\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)\n # atualiza xyw_min/max\n self.xyw_1 = (xyw_1[0], xyw_1[1])\n self.xyw_2 = (xyw_2[0], xyw_2[1])\n self.xyw_3 = (xyw_3[0], xyw_3[1])\n self.xyw_4 = (xyw_4[0], xyw_4[1])\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n # atualiza o centro\n self.newCenter = self.calcCenter()\n # atualiza scn\n self.scn()\n\n # Rotaciona a window no sentido horario\n def rotateRight(self, angle):\n # 360 - 10 = 350\n self._rotate(360 - angle)\n\n # Rotaciona a window no sentido anti-horario\n def rotateLeft(self, angle):\n self._rotate(angle)\n\n # Rotaciona a window em relaçao ao seu proprio centro\n def _rotate(self, angle=0):\n self.degrees = (self.degrees + angle) % 360\n # centro do obj\n cx, cy = self.newCenter\n # coords do mundo\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],\n [self.xyw_2[0], self.xyw_2[1], 1],\n [self.xyw_3[0], self.xyw_3[1], 1],\n [self.xyw_4[0], self.xyw_4[1], 1]])\n # ajusta o centro do mundo com o obj\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])\n # realiza a rotacao\n radians = numpy.radians(angle)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n # reverte a transformacao feita\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n # gera a matriz de transformacao de rotacao\n transformations = numpy.matmul(translate_matrix_1, rotate_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n # aplica as transformacoes\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)\n # 
atualiza xyw_min/max\n self.xyw_1 = (xyw_1[0], xyw_1[1])\n self.xyw_2 = (xyw_2[0], xyw_2[1])\n self.xyw_3 = (xyw_3[0], xyw_3[1])\n self.xyw_4 = (xyw_4[0], xyw_4[1])\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n # atualiza o centro\n self.newCenter = self.calcCenter()\n # atualiza scn\n self.scn()\n\n # Calcula a matriz de transformaçao de sistemas de coordenadas da window\n def scn(self):\n # centro do obj\n cx, cy = self.newCenter\n # ajusta o centro do mundo com o obj\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])\n # pega ao INVERSO da rotacao atual da window\n radians = numpy.radians((-1) * self.degrees)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n # rotaciona\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n length, height = self.getWindowDimensions()\n sx = 1 / (length / 2)\n sy = 1 / (height / 2)\n # realiza o escalonamento(num sei se esse e o termo correto)\n scale_matrix = numpy.array([[sx, 0, 0], [0, sy, 0], [0, 0, 1]])\n # gera a matriz de conversao para scn da window\n scn = numpy.matmul(translate_matrix_1, rotate_matrix)\n self.window_scn = numpy.matmul(scn, scale_matrix)\n\n # Aplica a matriz de transformaçao de sistema de coordenadas da window a um ponto qualquer\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-ids": [
15,
16,
18,
20,
22
]
}
|
[
15,
16,
18,
20,
22
] |
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
from sklearn.svm import SVR
# In[2]:
from sklearn.preprocessing import StandardScaler
# In[3]:
#import matplotlib.pyplot as plt
# %matplotlib inline
# In[90]:
aapl = pd.read_csv('return_fcast.csv')
# In[79]:
y = aapl['return']
# In[80]:
X = aapl[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']]
# In[14]:
X = np.array(X).reshape((2475, 8))  # pandas DataFrames no longer expose .reshape; go through numpy
# In[21]:
y = np.array(y).reshape((2475,1))
# In[22]:
sc_X = StandardScaler()
sc_y = StandardScaler()
X = sc_X.fit_transform(X)
y = sc_y.fit_transform(y)
# In[25]:
regressor = SVR(kernel='rbf')
regressor.fit(X,y)
# In[27]:
testing_df = pd.read_csv('testing.csv')
# In[28]:
X_test = testing_df[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']]
# In[29]:
X_test.shape
# In[33]:
X_test = sc_X.fit_transform(X_test)
# In[34]:
y_pred = regressor.predict(X_test)
y_pred = sc_y.inverse_transform(y_pred)
# In[35]:
y_pred
# In[38]:
for i in range(len(y_pred)):
print(y_pred[i])
# In[3]:
# run SVR for the AXP-...DD stocks
axp = pd.DataFrame(columns=aapl.columns)
# In[3]:
#stocks = ['AAPL','AXP','BA','CAT','CSCO','CVX','DIS','DD','GS']
stocks = ['MCD']
# In[4]:
# read indicators 09-18
ADXR = pd.read_csv('data/djADXR.csv')
ATR = pd.read_csv('data/djATR.csv')
SMA = pd.read_csv('data/sma.csv')
Hurst = pd.read_csv('data/hurst.csv')
EMA = pd.read_csv('data/ema.csv')
MACD = pd.read_csv('data/macd.csv')
VIX = pd.read_csv('data/vix.csv')
RSI = pd.read_csv('data/rsi.csv')
# In[5]:
VIX.iloc[40:2476]
# In[121]:
# read stock prices 09-18
# In[3]:
dj_df = pd.read_csv('data/djindex.csv')
# In[7]:
dj_df = pd.read_csv('data/djindex.csv')
#dj_df = dj_df[['Date','AAPL','AXP','BA','CAT','CSCO','CVX','DIS','DD','GS']].iloc[39:2516]
#dj_df = dj_df[['Date','MCD']].iloc[39:2516]
return_df = pd.DataFrame(columns=dj_df.columns[1:],index=dj_df['Date'])
for i in dj_df.columns[1:]:
return_df[i] = list(np.log(dj_df[i]/dj_df[i].shift(1)))
# In[9]:
return_df = return_df.dropna()
# In[10]:
cov = return_df.cov()
cov.to_csv('cov0918.csv')
# In[41]:
list(dj_df.columns[1:])
# In[39]:
dj_df.index = dj_df['Date']
# In[65]:
stock18 = dj_df[dj_df.columns[1:]].iloc[-252:]
# In[67]:
stock18
# In[44]:
dj_df
# In[68]:
dj_df = pd.read_csv('data/djindex.csv')
dj_df.index = dj_df['Date']
return_df = pd.DataFrame(columns=list(dj_df.columns[1:]))
for i in dj_df.columns[1:]:
return_df[i] = list(np.log(stock18[i]/stock18[i].shift(1)))
# In[69]:
return_df = return_df.dropna()
# In[70]:
return_df
# In[71]:
cov = return_df.cov()
# In[72]:
cov
# In[73]:
cov.to_csv('cov.csv')
# In[10]:
# store return prediction
result = pd.DataFrame(columns=stocks)
# In[21]:
# indicators forecast
ADXR_f = pd.read_csv('data/tesingadxr740.csv')
ATR_f = pd.read_csv('data/tesingatr740.csv')
SMA_f = pd.read_csv('data/sma_forecast.csv')
Hurst_f = pd.read_csv('data/hurst_forecast.csv')
EMA_f = pd.read_csv('data/ema_fcast.csv')
MACD_f = pd.read_csv('data/macd_fcast.csv')
VIX_f = pd.read_csv('data/vix_fcast.csv')
RSI_f = pd.read_csv('data/rsi_fcast.csv')
# In[22]:
# Initialize scalers to standardize the variables (zero mean, unit variance)
sc_X = StandardScaler()
sc_y = StandardScaler()
regressor = SVR(kernel='rbf')
temp = pd.DataFrame(columns=['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI'])
temp['VIX'] = list(VIX['VIX'].iloc[40:2516]) # all stocks share the same vix
temp_f = pd.DataFrame(columns=['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI'])
temp_f['VIX'] = list(VIX_f['VIX Forecast'].iloc[0:250])
for i in ['MCD']: # iterate each stock
# First, extract training data set, including indicators(X) and return(y)
temp['ADXR'] = list(ADXR[i].iloc[40:2516])
temp['ATR'] = list(ATR[i].iloc[40:2516])
temp['SMA'] = list(SMA['SMA_'+i].iloc[40:2516])
temp['Hurst'] = list(Hurst['Hurst'+i].iloc[40:2516])
temp['EMA'] = list(EMA[i].iloc[40:2516])
temp['MACD'] = list(MACD[i].iloc[40:2516])
temp['RSI'] = list(RSI[i].iloc[40:2516])
# transformation
X = sc_X.fit_transform(temp[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']])
#print(X.shape)
y = sc_y.fit_transform(np.array(return_df[i].dropna()).reshape(2476,1))
#print(y.shape)
# training
regressor.fit(X,y)
# predicting
temp_f['ADXR'] = list(ADXR_f[i+'.1976.10.11.20.00.00'].iloc[0:250])
temp_f['ATR'] = list(ATR_f[i+'.1976.11.07.19.00.00'].iloc[0:250])
temp_f['SMA'] = list(SMA_f[i].iloc[0:250])
temp_f['Hurst'] = list(Hurst_f[i].iloc[0:250])
temp_f['EMA'] = list(EMA_f[i].iloc[0:250])
temp_f['MACD'] = list(MACD_f[i].iloc[0:250])
temp_f['RSI'] = list(RSI_f[i].iloc[0:250])
X_test = temp_f[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']]
X_test = sc_X.fit_transform(X_test)
y_pred = regressor.predict(X_test)
y_pred = sc_y.inverse_transform(y_pred)
# write predicted returns into result
result[i] = y_pred
print(i)
# In[23]:
result.to_csv('mac_fcast.csv')
# In[161]:
ADXR['AAPL'].iloc[40:2516]
# In[ ]:
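One caveat worth flagging: inside the loop the scaler is re-fit on the forecast features (sc_X.fit_transform(X_test)), which lets test-set statistics leak into the transform. A sketch of the conventional pattern (feature_cols is shorthand for the indicator column list used above):
feature_cols = ['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD', 'VIX', 'RSI']
# fit the scaler on training features only, then reuse the fitted statistics
X = sc_X.fit_transform(temp[feature_cols])     # fit + transform: training data
X_test = sc_X.transform(temp_f[feature_cols])  # transform only: forecast data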
|
normal
|
{
"blob_id": "4a8d203872a1e86c54142dea6cd04c1cac6bcfb2",
"index": 5067,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nregressor.fit(X, y)\n<mask token>\nX_test.shape\n<mask token>\ny_pred\n<mask token>\ny_pred\nfor i in range(len(y_pred)):\n print(y_pred[i])\n<mask token>\nVIX.iloc[40:2476]\n<mask token>\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(dj_df[i] / dj_df[i].shift(1)))\n<mask token>\ncov.to_csv('cov0918.csv')\nlist(dj_df.columns[1:])\n<mask token>\nstock18\ndj_df\n<mask token>\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(stock18[i] / stock18[i].shift(1)))\n<mask token>\nreturn_df\n<mask token>\ncov\ncov.to_csv('cov.csv')\n<mask token>\nfor i in ['MCD']:\n temp['ADXR'] = list(ADXR[i].iloc[40:2516])\n temp['ATR'] = list(ATR[i].iloc[40:2516])\n temp['SMA'] = list(SMA['SMA_' + i].iloc[40:2516])\n temp['Hurst'] = list(Hurst['Hurst' + i].iloc[40:2516])\n temp['EMA'] = list(EMA[i].iloc[40:2516])\n temp['MACD'] = list(MACD[i].iloc[40:2516])\n temp['RSI'] = list(RSI[i].iloc[40:2516])\n X = sc_X.fit_transform(temp[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA',\n 'MACD', 'VIX', 'RSI']])\n y = sc_y.fit_transform(np.array(return_df[i].dropna()).reshape(2476, 1))\n regressor.fit(X, y)\n temp_f['ADXR'] = list(ADXR_f[i + '.1976.10.11.20.00.00'].iloc[0:250])\n temp_f['ATR'] = list(ATR_f[i + '.1976.11.07.19.00.00'].iloc[0:250])\n temp_f['SMA'] = list(SMA_f[i].iloc[0:250])\n temp_f['Hurst'] = list(Hurst_f[i].iloc[0:250])\n temp_f['EMA'] = list(EMA_f[i].iloc[0:250])\n temp_f['MACD'] = list(MACD_f[i].iloc[0:250])\n temp_f['RSI'] = list(RSI_f[i].iloc[0:250])\n X_test = temp_f[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD', 'VIX',\n 'RSI']]\n X_test = sc_X.fit_transform(X_test)\n y_pred = regressor.predict(X_test)\n y_pred = sc_y.inverse_transform(y_pred)\n result[i] = y_pred\n print(i)\nresult.to_csv('mac_fcast.csv')\nADXR['AAPL'].iloc[40:2516]\n",
"step-3": "<mask token>\naapl = pd.read_csv('return_fcast.csv')\ny = aapl['return']\nX = aapl[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD', 'VIX', 'RSI']]\nX = X.reshape((2475, 8))\ny = np.array(y).reshape((2475, 1))\nsc_X = StandardScaler()\nsc_y = StandardScaler()\nX = sc_X.fit_transform(X)\ny = sc_y.fit_transform(y)\nregressor = SVR(kernel='rbf')\nregressor.fit(X, y)\ntesting_df = pd.read_csv('testing.csv')\nX_test = testing_df[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD', 'VIX',\n 'RSI']]\nX_test.shape\nX_test = sc_X.fit_transform(X_test)\ny_pred\ny_pred = regressor.predict(X_test)\ny_pred = sc_y.inverse_transform(y_pred)\ny_pred\nfor i in range(len(y_pred)):\n print(y_pred[i])\naxp = pd.DataFrame(columns=aapl.columns)\nstocks = ['MCD']\nADXR = pd.read_csv('data/djADXR.csv')\nATR = pd.read_csv('data/djATR.csv')\nSMA = pd.read_csv('data/sma.csv')\nHurst = pd.read_csv('data/hurst.csv')\nEMA = pd.read_csv('data/ema.csv')\nMACD = pd.read_csv('data/macd.csv')\nVIX = pd.read_csv('data/vix.csv')\nRSI = pd.read_csv('data/rsi.csv')\nVIX.iloc[40:2476]\ndj_df = pd.read_csv('data/djindex.csv')\ndj_df = pd.read_csv('data/djindex.csv')\nreturn_df = pd.DataFrame(columns=dj_df.columns[1:], index=dj_df['Date'])\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(dj_df[i] / dj_df[i].shift(1)))\nreturn_df = return_df.dropna()\ncov = return_df.cov()\ncov.to_csv('cov0918.csv')\nlist(dj_df.columns[1:])\ndj_df.index = dj_df\nstock18 = dj_df[dj_df.columns[1:]].iloc[-252:]\nstock18\ndj_df\ndj_df = pd.read_csv('data/djindex.csv')\ndj_df.index = dj_df['Date']\nreturn_df = pd.DataFrame(columns=list(dj_df.columns[1:]))\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(stock18[i] / stock18[i].shift(1)))\nreturn_df = return_df.dropna()\nreturn_df\ncov = return_df.cov()\ncov\ncov.to_csv('cov.csv')\nresult = pd.DataFrame(columns=stocks)\nADXR_f = pd.read_csv('data/tesingadxr740.csv')\nATR_f = pd.read_csv('data/tesingatr740.csv')\nSMA_f = pd.read_csv('data/sma_forecast.csv')\nHurst_f = pd.read_csv('data/hurst_forecast.csv')\nEMA_f = pd.read_csv('data/ema_fcast.csv')\nMACD_f = pd.read_csv('data/macd_fcast.csv')\nVIX_f = pd.read_csv('data/vix_fcast.csv')\nRSI_f = pd.read_csv('data/rsi_fcast.csv')\nsc_X = StandardScaler()\nsc_y = StandardScaler()\nregressor = SVR(kernel='rbf')\ntemp = pd.DataFrame(columns=['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD',\n 'VIX', 'RSI'])\ntemp['VIX'] = list(VIX['VIX'].iloc[40:2516])\ntemp_f = pd.DataFrame(columns=['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD',\n 'VIX', 'RSI'])\ntemp_f['VIX'] = list(VIX_f['VIX Forecast'].iloc[0:250])\nfor i in ['MCD']:\n temp['ADXR'] = list(ADXR[i].iloc[40:2516])\n temp['ATR'] = list(ATR[i].iloc[40:2516])\n temp['SMA'] = list(SMA['SMA_' + i].iloc[40:2516])\n temp['Hurst'] = list(Hurst['Hurst' + i].iloc[40:2516])\n temp['EMA'] = list(EMA[i].iloc[40:2516])\n temp['MACD'] = list(MACD[i].iloc[40:2516])\n temp['RSI'] = list(RSI[i].iloc[40:2516])\n X = sc_X.fit_transform(temp[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA',\n 'MACD', 'VIX', 'RSI']])\n y = sc_y.fit_transform(np.array(return_df[i].dropna()).reshape(2476, 1))\n regressor.fit(X, y)\n temp_f['ADXR'] = list(ADXR_f[i + '.1976.10.11.20.00.00'].iloc[0:250])\n temp_f['ATR'] = list(ATR_f[i + '.1976.11.07.19.00.00'].iloc[0:250])\n temp_f['SMA'] = list(SMA_f[i].iloc[0:250])\n temp_f['Hurst'] = list(Hurst_f[i].iloc[0:250])\n temp_f['EMA'] = list(EMA_f[i].iloc[0:250])\n temp_f['MACD'] = list(MACD_f[i].iloc[0:250])\n temp_f['RSI'] = list(RSI_f[i].iloc[0:250])\n X_test = temp_f[['ADXR', 'ATR', 'SMA', 'Hurst', 
'EMA', 'MACD', 'VIX',\n 'RSI']]\n X_test = sc_X.fit_transform(X_test)\n y_pred = regressor.predict(X_test)\n y_pred = sc_y.inverse_transform(y_pred)\n result[i] = y_pred\n print(i)\nresult.to_csv('mac_fcast.csv')\nADXR['AAPL'].iloc[40:2516]\n",
"step-4": "import numpy as np\nimport pandas as pd\nfrom sklearn.svm import SVR\nfrom sklearn.preprocessing import StandardScaler\naapl = pd.read_csv('return_fcast.csv')\ny = aapl['return']\nX = aapl[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD', 'VIX', 'RSI']]\nX = X.reshape((2475, 8))\ny = np.array(y).reshape((2475, 1))\nsc_X = StandardScaler()\nsc_y = StandardScaler()\nX = sc_X.fit_transform(X)\ny = sc_y.fit_transform(y)\nregressor = SVR(kernel='rbf')\nregressor.fit(X, y)\ntesting_df = pd.read_csv('testing.csv')\nX_test = testing_df[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD', 'VIX',\n 'RSI']]\nX_test.shape\nX_test = sc_X.fit_transform(X_test)\ny_pred\ny_pred = regressor.predict(X_test)\ny_pred = sc_y.inverse_transform(y_pred)\ny_pred\nfor i in range(len(y_pred)):\n print(y_pred[i])\naxp = pd.DataFrame(columns=aapl.columns)\nstocks = ['MCD']\nADXR = pd.read_csv('data/djADXR.csv')\nATR = pd.read_csv('data/djATR.csv')\nSMA = pd.read_csv('data/sma.csv')\nHurst = pd.read_csv('data/hurst.csv')\nEMA = pd.read_csv('data/ema.csv')\nMACD = pd.read_csv('data/macd.csv')\nVIX = pd.read_csv('data/vix.csv')\nRSI = pd.read_csv('data/rsi.csv')\nVIX.iloc[40:2476]\ndj_df = pd.read_csv('data/djindex.csv')\ndj_df = pd.read_csv('data/djindex.csv')\nreturn_df = pd.DataFrame(columns=dj_df.columns[1:], index=dj_df['Date'])\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(dj_df[i] / dj_df[i].shift(1)))\nreturn_df = return_df.dropna()\ncov = return_df.cov()\ncov.to_csv('cov0918.csv')\nlist(dj_df.columns[1:])\ndj_df.index = dj_df\nstock18 = dj_df[dj_df.columns[1:]].iloc[-252:]\nstock18\ndj_df\ndj_df = pd.read_csv('data/djindex.csv')\ndj_df.index = dj_df['Date']\nreturn_df = pd.DataFrame(columns=list(dj_df.columns[1:]))\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(stock18[i] / stock18[i].shift(1)))\nreturn_df = return_df.dropna()\nreturn_df\ncov = return_df.cov()\ncov\ncov.to_csv('cov.csv')\nresult = pd.DataFrame(columns=stocks)\nADXR_f = pd.read_csv('data/tesingadxr740.csv')\nATR_f = pd.read_csv('data/tesingatr740.csv')\nSMA_f = pd.read_csv('data/sma_forecast.csv')\nHurst_f = pd.read_csv('data/hurst_forecast.csv')\nEMA_f = pd.read_csv('data/ema_fcast.csv')\nMACD_f = pd.read_csv('data/macd_fcast.csv')\nVIX_f = pd.read_csv('data/vix_fcast.csv')\nRSI_f = pd.read_csv('data/rsi_fcast.csv')\nsc_X = StandardScaler()\nsc_y = StandardScaler()\nregressor = SVR(kernel='rbf')\ntemp = pd.DataFrame(columns=['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD',\n 'VIX', 'RSI'])\ntemp['VIX'] = list(VIX['VIX'].iloc[40:2516])\ntemp_f = pd.DataFrame(columns=['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD',\n 'VIX', 'RSI'])\ntemp_f['VIX'] = list(VIX_f['VIX Forecast'].iloc[0:250])\nfor i in ['MCD']:\n temp['ADXR'] = list(ADXR[i].iloc[40:2516])\n temp['ATR'] = list(ATR[i].iloc[40:2516])\n temp['SMA'] = list(SMA['SMA_' + i].iloc[40:2516])\n temp['Hurst'] = list(Hurst['Hurst' + i].iloc[40:2516])\n temp['EMA'] = list(EMA[i].iloc[40:2516])\n temp['MACD'] = list(MACD[i].iloc[40:2516])\n temp['RSI'] = list(RSI[i].iloc[40:2516])\n X = sc_X.fit_transform(temp[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA',\n 'MACD', 'VIX', 'RSI']])\n y = sc_y.fit_transform(np.array(return_df[i].dropna()).reshape(2476, 1))\n regressor.fit(X, y)\n temp_f['ADXR'] = list(ADXR_f[i + '.1976.10.11.20.00.00'].iloc[0:250])\n temp_f['ATR'] = list(ATR_f[i + '.1976.11.07.19.00.00'].iloc[0:250])\n temp_f['SMA'] = list(SMA_f[i].iloc[0:250])\n temp_f['Hurst'] = list(Hurst_f[i].iloc[0:250])\n temp_f['EMA'] = list(EMA_f[i].iloc[0:250])\n temp_f['MACD'] = 
list(MACD_f[i].iloc[0:250])\n temp_f['RSI'] = list(RSI_f[i].iloc[0:250])\n X_test = temp_f[['ADXR', 'ATR', 'SMA', 'Hurst', 'EMA', 'MACD', 'VIX',\n 'RSI']]\n X_test = sc_X.fit_transform(X_test)\n y_pred = regressor.predict(X_test)\n y_pred = sc_y.inverse_transform(y_pred)\n result[i] = y_pred\n print(i)\nresult.to_csv('mac_fcast.csv')\nADXR['AAPL'].iloc[40:2516]\n",
"step-5": "\n# coding: utf-8\n\n# In[1]:\n\nimport numpy as np\n\nimport pandas as pd\nfrom sklearn.svm import SVR\n\n\n# In[2]:\n\nfrom sklearn.preprocessing import StandardScaler\n\n\n# In[3]:\n\n#import matplotlib.pyplot as plt\n# %matplotlib inline\n\n\n# In[90]:\n\naapl = pd.read_csv('return_fcast.csv')\n\n\n# In[79]:\n\ny = aapl['return']\n\n\n# In[80]:\n\nX = aapl[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']]\n\n\n# In[14]:\n\nX = X.reshape((2475,8))\n\n\n# In[21]:\n\ny = np.array(y).reshape((2475,1))\n\n\n# In[22]:\n\nsc_X = StandardScaler()\nsc_y = StandardScaler()\nX = sc_X.fit_transform(X)\ny = sc_y.fit_transform(y)\n\n\n# In[25]:\n\nregressor = SVR(kernel='rbf')\nregressor.fit(X,y)\n\n\n# In[27]:\n\ntesting_df = pd.read_csv('testing.csv')\n\n\n# In[28]:\n\nX_test = testing_df[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']]\n\n\n# In[29]:\n\nX_test.shape\n\n\n# In[33]:\n\nX_test = sc_X.fit_transform(X_test)\n\n\n# In[34]:\n\n\n\n\n# In[35]:\n\ny_pred\n\n\n# In[36]:\n\ny_pred = regressor.predict(X_test)\ny_pred = sc_y.inverse_transform(y_pred)\n\n\n# In[37]:\n\ny_pred\n\n\n# In[38]:\n\nfor i in range(len(y_pred)):\n print(y_pred[i])\n\n\n# In[3]:\n\n# run SVR for the AXP-...DD stocks\naxp = pd.DataFrame(columns=aapl.columns)\n\n\n# In[3]:\n\n#stocks = ['AAPL','AXP','BA','CAT','CSCO','CVX','DIS','DD','GS']\nstocks = ['MCD']\n\n\n# In[4]:\n\n# read indicators 09-18\nADXR = pd.read_csv('data/djADXR.csv')\nATR = pd.read_csv('data/djATR.csv')\nSMA = pd.read_csv('data/sma.csv')\nHurst = pd.read_csv('data/hurst.csv')\nEMA = pd.read_csv('data/ema.csv')\nMACD = pd.read_csv('data/macd.csv')\nVIX = pd.read_csv('data/vix.csv')\nRSI = pd.read_csv('data/rsi.csv')\n\n\n# In[5]:\n\nVIX.iloc[40:2476]\n\n\n# In[121]:\n\n# read stock prices 09-18\n\n\n# In[3]:\n\ndj_df = pd.read_csv('data/djindex.csv')\n\n\n# In[7]:\n\ndj_df = pd.read_csv('data/djindex.csv')\n#dj_df = dj_df[['Date','AAPL','AXP','BA','CAT','CSCO','CVX','DIS','DD','GS']].iloc[39:2516]\n#dj_df = dj_df[['Date','MCD']].iloc[39:2516]\n\nreturn_df = pd.DataFrame(columns=dj_df.columns[1:],index=dj_df['Date'])\n\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(dj_df[i]/dj_df[i].shift(1)))\n\n\n# In[9]:\n\nreturn_df = return_df.dropna()\n\n\n# In[10]:\n\ncov = return_df.cov()\ncov.to_csv('cov0918.csv')\n\n\n# In[41]:\n\nlist(dj_df.columns[1:])\n\n\n# In[39]:\n\ndj_df.index = dj_df\n\n\n# In[65]:\n\nstock18 = dj_df[dj_df.columns[1:]].iloc[-252:]\n\n\n# In[67]:\n\nstock18\n\n\n# In[44]:\n\ndj_df\n\n\n# In[68]:\n\ndj_df = pd.read_csv('data/djindex.csv')\ndj_df.index = dj_df['Date']\n\nreturn_df = pd.DataFrame(columns=list(dj_df.columns[1:]))\n\nfor i in dj_df.columns[1:]:\n return_df[i] = list(np.log(stock18[i]/stock18[i].shift(1)))\n\n\n# In[69]:\n\nreturn_df = return_df.dropna()\n\n\n# In[70]:\n\nreturn_df\n\n\n# In[71]:\n\ncov = return_df.cov()\n\n\n# In[72]:\n\ncov\n\n\n# In[73]:\n\ncov.to_csv('cov.csv')\n\n\n# In[10]:\n\n# store return prediction\nresult = pd.DataFrame(columns=stocks)\n\n\n# In[21]:\n\n# indicators forecast\nADXR_f = pd.read_csv('data/tesingadxr740.csv')\nATR_f = pd.read_csv('data/tesingatr740.csv')\nSMA_f = pd.read_csv('data/sma_forecast.csv')\nHurst_f = pd.read_csv('data/hurst_forecast.csv')\nEMA_f = pd.read_csv('data/ema_fcast.csv')\nMACD_f = pd.read_csv('data/macd_fcast.csv')\nVIX_f = pd.read_csv('data/vix_fcast.csv')\nRSI_f = pd.read_csv('data/rsi_fcast.csv')\n\n\n# In[22]:\n\n# Initialized scaler in order to transform variables into (-1,1)\nsc_X = StandardScaler()\nsc_y = 
StandardScaler()\nregressor = SVR(kernel='rbf')\n\ntemp = pd.DataFrame(columns=['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI'])\ntemp['VIX'] = list(VIX['VIX'].iloc[40:2516]) # all stocks share the same vix\n\ntemp_f = pd.DataFrame(columns=['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI'])\ntemp_f['VIX'] = list(VIX_f['VIX Forecast'].iloc[0:250])\n\nfor i in ['MCD']: # iterate each stock\n \n # First, extract training data set, including indicators(X) and return(y) \n temp['ADXR'] = list(ADXR[i].iloc[40:2516])\n temp['ATR'] = list(ATR[i].iloc[40:2516])\n temp['SMA'] = list(SMA['SMA_'+i].iloc[40:2516])\n temp['Hurst'] = list(Hurst['Hurst'+i].iloc[40:2516])\n temp['EMA'] = list(EMA[i].iloc[40:2516])\n temp['MACD'] = list(MACD[i].iloc[40:2516])\n temp['RSI'] = list(RSI[i].iloc[40:2516])\n # transformation\n X = sc_X.fit_transform(temp[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']])\n #print(X.shape)\n y = sc_y.fit_transform(np.array(return_df[i].dropna()).reshape(2476,1))\n #print(y.shape)\n # training \n regressor.fit(X,y)\n # predicting\n temp_f['ADXR'] = list(ADXR_f[i+'.1976.10.11.20.00.00'].iloc[0:250])\n temp_f['ATR'] = list(ATR_f[i+'.1976.11.07.19.00.00'].iloc[0:250])\n temp_f['SMA'] = list(SMA_f[i].iloc[0:250])\n temp_f['Hurst'] = list(Hurst_f[i].iloc[0:250])\n temp_f['EMA'] = list(EMA_f[i].iloc[0:250])\n temp_f['MACD'] = list(MACD_f[i].iloc[0:250])\n temp_f['RSI'] = list(RSI_f[i].iloc[0:250])\n X_test = temp_f[['ADXR','ATR','SMA','Hurst','EMA','MACD','VIX','RSI']]\n X_test = sc_X.fit_transform(X_test)\n y_pred = regressor.predict(X_test)\n y_pred = sc_y.inverse_transform(y_pred)\n # write predicted returns into result\n result[i] = y_pred\n print(i)\n\n\n# In[23]:\n\nresult.to_csv('mac_fcast.csv')\n\n\n# In[161]:\n\nADXR['AAPL'].iloc[40:2516]\n\n\n# In[ ]:\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from __future__ import division
from collections import deque
import os
import warnings
import numpy as np
import keras.backend as K
import keras.layers as layers
import keras.optimizers as optimizers
from rl.core import Agent
from rl.util import *
def mean_q(y_true, y_pred):
return K.mean(K.max(y_pred, axis=-1))
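# Illustrative note (not in the original): as a Keras metric, mean_q ignores
# y_true. For a batch of per-sample Q rows [[0.2, 0.9], [0.1, 0.4]] it reports
# mean(max per row) = mean([0.9, 0.4]) = 0.65, i.e. the average magnitude of
# the best predicted Q-value.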
# Deep DPG as described by Lillicrap et al. (2015)
# http://arxiv.org/pdf/1509.02971v2.pdf
# http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.646.4324&rep=rep1&type=pdf
class UBDDPGAgent(Agent):
"""Write me
"""
def __init__(self, nb_actions, actor, critic, nb_players, critic_action_inputs, memory,
gamma=.99, batch_size=32, nb_steps_warmup_critic=1000, nb_steps_warmup_actor=1000,
train_interval=1, memory_interval=1, delta_range=None, delta_clip=np.inf,
random_process=None, custom_model_objects={}, target_model_update=.001, **kwargs):
assert len(critic_action_inputs) == nb_players
        if hasattr(actor.output, '__len__') and len(actor.output) != nb_players:
            raise ValueError(
                'Actor "{}" does not have the right number of outputs. '
                'DDPG expects an actor that has {} outputs.'.format(actor, nb_players))
# if hasattr(critic.output, '__len__') and len(critic.output) > 1:
# raise ValueError('Critic "{}" has more than one output. DDPG expects a critic that has a single output.'.format(critic))
for critic_action_input in critic_action_inputs:
if critic_action_input not in critic.input:
raise ValueError('Critic "{}" does not have designated action input "{}".'.format(critic, critic_action_input))
if not hasattr(critic.input, '__len__') or len(critic.input) < 2:
raise ValueError('Critic "{}" does not have enough inputs. The critic must have at least two inputs, one for the action and one for the observation.'.format(critic))
super(UBDDPGAgent, self).__init__(**kwargs)
# Soft vs hard target model updates.
if target_model_update < 0:
raise ValueError('`target_model_update` must be >= 0.')
elif target_model_update >= 1:
# Hard update every `target_model_update` steps.
target_model_update = int(target_model_update)
else:
# Soft update with `(1 - target_model_update) * old + target_model_update * new`.
target_model_update = float(target_model_update)
if delta_range is not None:
warnings.warn('`delta_range` is deprecated. Please use `delta_clip` instead, which takes a single scalar. For now we\'re falling back to `delta_range[1] = {}`'.format(delta_range[1]))
delta_clip = delta_range[1]
# Parameters.
self.nb_actions = nb_actions
self.nb_steps_warmup_actor = nb_steps_warmup_actor
self.nb_steps_warmup_critic = nb_steps_warmup_critic
self.random_process = random_process
self.delta_clip = delta_clip
self.gamma = gamma
self.target_model_update = target_model_update
self.batch_size = batch_size
self.train_interval = train_interval
self.memory_interval = memory_interval
self.custom_model_objects = custom_model_objects
# Related objects.
self.actor = actor
self.critic = critic
self.nb_players = nb_players
self.critic_action_inputs = critic_action_inputs
self.critic_action_input_idxes = [
self.critic.input.index(critic_action_input)
for critic_action_input in critic_action_inputs
]
self.memory = memory
# State.
self.compiled = False
self.reset_states()
@property
def uses_learning_phase(self):
return self.actor.uses_learning_phase or self.critic.uses_learning_phase
def compile(self, optimizer, metrics=[]):
        metrics = metrics + [mean_q]  # copy instead of +=: avoid mutating the shared default list
if type(optimizer) in (list, tuple):
if len(optimizer) != 2:
raise ValueError('More than two optimizers provided. Please only provide a maximum of two optimizers, the first one for the actor and the second one for the critic.')
actor_optimizer, critic_optimizer = optimizer
else:
actor_optimizer = optimizer
critic_optimizer = clone_optimizer(optimizer)
if type(actor_optimizer) is str:
actor_optimizer = optimizers.get(actor_optimizer)
if type(critic_optimizer) is str:
critic_optimizer = optimizers.get(critic_optimizer)
assert actor_optimizer != critic_optimizer
if len(metrics) == 2 and hasattr(metrics[0], '__len__') and hasattr(metrics[1], '__len__'):
actor_metrics, critic_metrics = metrics
else:
actor_metrics = critic_metrics = metrics
def clipped_error(y_true, y_pred):
y_true = K.squeeze(y_true, axis=-1)
y_pred = K.squeeze(y_pred, axis=-1)
loss = K.mean(
# K.random_uniform(shape=(self.batch_size, self.nb_players), minval=0., maxval=1.) *
huber_loss(y_true, y_pred, self.delta_clip),
axis=-1)
# y_true = K.print_tensor(y_true, message='y_true: ')
# y_pred = K.print_tensor(y_pred, message='y_pred: ')
# loss = K.print_tensor(loss, message='loss: ')
return loss
# Compile target networks. We only use them in feed-forward mode, hence we can pass any
# optimizer and loss since we never use it anyway.
self.target_actor = clone_model(self.actor, self.custom_model_objects)
self.target_actor.compile(optimizer='sgd', loss='mse')
self.target_critic = clone_model(self.critic, self.custom_model_objects)
self.target_critic.compile(optimizer='sgd', loss='mse')
# We also compile the actor. We never optimize the actor using Keras but instead compute
# the policy gradient ourselves. However, we need the actor in feed-forward mode, hence
        # we also compile it with any optimizer and loss; neither is ever used.
self.actor.compile(optimizer='sgd', loss='mse')
# Compile the critic.
if self.target_model_update < 1.:
# We use the `AdditionalUpdatesOptimizer` to efficiently soft-update the target model.
critic_updates = get_soft_target_model_updates(self.target_critic, self.critic, self.target_model_update)
critic_optimizer = AdditionalUpdatesOptimizer(critic_optimizer, critic_updates)
self.critic.compile(
optimizer=critic_optimizer,
            loss=[clipped_error] * self.nb_players,
metrics=critic_metrics)
# Combine actor and critic so that we can get the policy gradient.
# Assuming critic's state inputs are the same as actor's.
critic_inputs = []
actor_inputs = []
for i in self.critic.input:
if i in self.critic_action_inputs:
critic_inputs.append([])
else:
critic_inputs.append(i)
actor_inputs.append(i)
actor_outputs = self.actor(actor_inputs)
if not isinstance(actor_outputs, (list,)):
actor_outputs = [actor_outputs]
assert len(actor_outputs) == self.nb_players
for input_idx, actor_output in zip(self.critic_action_input_idxes, actor_outputs):
critic_inputs[input_idx] = actor_output
# critic_outputs = layers.Maximum()(self.critic(critic_inputs))
critic_outputs = self.critic(critic_inputs)
if not isinstance(critic_outputs, (list,)):
critic_outputs = [critic_outputs]
assert len(critic_outputs) == self.nb_players
        actor_losses = [None] * self.nb_players
for input_idx, critic_output in zip(self.critic_action_input_idxes, critic_outputs):
actor_losses[input_idx] = -K.mean(critic_output)
updates = actor_optimizer.get_updates(
params=self.actor.trainable_weights,
loss=actor_losses)
if self.target_model_update < 1.:
# Include soft target model updates.
updates += get_soft_target_model_updates(self.target_actor, self.actor, self.target_model_update)
updates += self.actor.updates # include other updates of the actor, e.g. for BN
# Finally, combine it all into a callable function.
if K.backend() == 'tensorflow':
self.actor_train_fn = K.function(actor_inputs + [K.learning_phase()],
actor_outputs, updates=updates)
else:
if self.uses_learning_phase:
actor_inputs += [K.learning_phase()]
self.actor_train_fn = K.function(actor_inputs, actor_outputs, updates=updates)
self.actor_optimizer = actor_optimizer
self.compiled = True
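    # Illustrative call patterns (assumed, mirroring the optimizer handling at
    # the top of compile()): agent.compile([actor_opt, critic_opt]) uses the
    # first optimizer for the actor and the second for the critic, while
    # agent.compile(single_opt) clones the single optimizer for the critic.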
def load_weights(self, filepath):
filename, extension = os.path.splitext(filepath)
actor_filepath = filename + '_actor' + extension
critic_filepath = filename + '_critic' + extension
self.actor.load_weights(actor_filepath)
self.critic.load_weights(critic_filepath)
self.update_target_models_hard()
def save_weights(self, filepath, overwrite=False):
filename, extension = os.path.splitext(filepath)
actor_filepath = filename + '_actor' + extension
critic_filepath = filename + '_critic' + extension
self.actor.save_weights(actor_filepath, overwrite=overwrite)
self.critic.save_weights(critic_filepath, overwrite=overwrite)
def update_target_models_hard(self):
self.target_critic.set_weights(self.critic.get_weights())
self.target_actor.set_weights(self.actor.get_weights())
# TODO: implement pickle
def reset_states(self):
if self.random_process is not None:
self.random_process.reset_states()
self.recent_action = None
self.recent_observation = None
if self.compiled:
self.actor.reset_states()
self.critic.reset_states()
self.target_actor.reset_states()
self.target_critic.reset_states()
def process_state_batch(self, batch):
batch = np.array(batch)
if self.processor is None:
return batch
return self.processor.process_state_batch(batch)
def select_action(self, state):
batch = self.process_state_batch([state])
# actions = [action.flatten() for action in self.actor.predict_on_batch(batch)]
actions = self.actor.predict_on_batch(batch)
if self.nb_players == 1:
            actions = [actions]
# actions = [a.flatten() for a in actions]
assert len(actions) == self.nb_players
# assert actions[0].shape == (self.nb_actions,)
assert actions[0].shape == (1, self.nb_actions)
# print('actions: {}'.format(actions))
if len(self.critic.inputs) > (self.nb_players+1): # state is a list
state_batch_with_action = batch[:]
else:
state_batch_with_action = [batch]
for action_idx, input_idx in enumerate(self.critic_action_input_idxes):
state_batch_with_action.insert(input_idx, actions[action_idx])
q_values = [
qv.flatten()
for qv in self.critic.predict_on_batch(state_batch_with_action)
]
assert q_values[0].shape == (1, )
assert len(q_values) == self.nb_players
# print('q_values: {}'.format(q_values))
action_best = actions[np.argmax(q_values)].flatten()
        assert action_best.shape == (self.nb_actions, )
# print('action_best: {}'.format(action_best))
# print(type(action_best[0]))
# Apply noise, if a random process is set.
if self.training and self.random_process is not None:
noise = self.random_process.sample()
assert noise.shape == action_best.shape
action_best += noise
return action_best
def forward(self, observation):
# Select an action.
state = self.memory.get_recent_state(observation)
action = self.select_action(state) # TODO: move this into policy
# Book-keeping.
self.recent_observation = observation
self.recent_action = action
return action
@property
def layers(self):
return self.actor.layers[:] + self.critic.layers[:]
@property
def metrics_names(self):
names = self.critic.metrics_names[:]
if self.processor is not None:
names += self.processor.metrics_names[:]
return names
def backward(self, reward, terminal=False):
# Store most recent experience in memory.
if self.step % self.memory_interval == 0:
self.memory.append(self.recent_observation, self.recent_action, reward, terminal,
training=self.training)
metrics = [np.nan for _ in self.metrics_names]
if not self.training:
# We're done here. No need to update the experience memory since we only use the working
# memory to obtain the state over the most recent observations.
return metrics
# Train the network on a single stochastic batch.
can_train_either = self.step > self.nb_steps_warmup_critic or self.step > self.nb_steps_warmup_actor
if can_train_either and self.step % self.train_interval == 0:
experiences = self.memory.sample(self.batch_size)
assert len(experiences) == self.batch_size
# Start by extracting the necessary parameters (we use a vectorized implementation).
state0_batch = []
reward_batch = []
action_batch = []
terminal1_batch = []
state1_batch = []
for e in experiences:
state0_batch.append(e.state0)
state1_batch.append(e.state1)
reward_batch.append(e.reward)
action_batch.append(e.action)
terminal1_batch.append(0. if e.terminal1 else 1.)
# Prepare and validate parameters.
state0_batch = self.process_state_batch(state0_batch)
state1_batch = self.process_state_batch(state1_batch)
terminal1_batch = np.array(terminal1_batch)
reward_batch = np.array(reward_batch)
action_batch = np.array(action_batch)
assert reward_batch.shape == (self.batch_size,)
assert terminal1_batch.shape == reward_batch.shape
assert action_batch.shape == (self.batch_size, self.nb_actions)
# Update critic, if warm up is over.
if self.step > self.nb_steps_warmup_critic:
target_actions = self.target_actor.predict_on_batch(state1_batch)
if not isinstance(target_actions, (list,)):
target_actions = [target_actions]
assert len(target_actions) == self.nb_players
assert target_actions[0].shape == (self.batch_size, self.nb_actions)
if len(self.critic.inputs) > (self.nb_players+1): # state is a list
# if len(self.critic.inputs) >= 3:
state1_batch_with_action = state1_batch[:]
else:
state1_batch_with_action = [state1_batch]
# state1_batch_with_action.insert(self.critic_action_input_idx, target_actions)
for action_idx, input_idx in enumerate(self.critic_action_input_idxes):
state1_batch_with_action.insert(input_idx, target_actions[action_idx])
target_q_values = self.target_critic.predict_on_batch(state1_batch_with_action)
if not isinstance(target_q_values, (list,)):
target_q_values = [target_q_values]
target_q_values = [ tqv.flatten() for tqv in target_q_values]
assert target_q_values[0].shape == reward_batch.shape
assert len(target_q_values) == self.nb_players
# Compute r_t + gamma * Q(s_t+1, mu(s_t+1)) and update the target ys accordingly,
# but only for the affected output units (as given by action_batch).
discounted_reward_batch = [
self.gamma * terminal1_batch * tqv
for tqv in target_q_values
]
assert discounted_reward_batch[0].shape == reward_batch.shape
targets = [reward_batch + drb for drb in discounted_reward_batch] # .reshape(self.batch_size, 1)
assert targets[0].shape == reward_batch.shape
assert len(targets) == self.nb_players
# Perform a single batch update on the critic network.
# if len(self.critic.inputs) >= 3:
if len(self.critic.inputs) > (self.nb_players+1): # state is a list
state0_batch_with_action = state0_batch[:]
else:
state0_batch_with_action = [state0_batch]
for input_idx in self.critic_action_input_idxes:
state0_batch_with_action.insert(input_idx, action_batch)
# state0_batch_with_action.insert(self.critic_action_input_idx, action_batch)
metrics = self.critic.train_on_batch(
state0_batch_with_action,
targets)
if self.processor is not None:
metrics += self.processor.metrics
# q_values = self.critic.predict_on_batch(state0_batch_with_action)
# if not isinstance(q_values, (list,)):
# q_values = [q_values]
# q_values = [ qv.flatten() for qv in q_values]
# print('gamma: {}'.format(self.gamma))
# print('terminal1_batch: {}'.format(terminal1_batch))
# print('target_q_values: {}'.format(target_q_values))
# print('discounted_reward_batch: {}'.format(discounted_reward_batch))
# print('reward_batch: {}'.format(reward_batch))
# print('targets: {}'.format(targets))
# print('current q values: {}'.format(q_values))
# Update actor, if warm up is over.
if self.step > self.nb_steps_warmup_actor:
# TODO: implement metrics for actor
if len(self.actor.inputs) >= 2:
inputs = state0_batch[:]
else:
inputs = [state0_batch]
if self.uses_learning_phase:
inputs += [self.training]
action_values = self.actor_train_fn(inputs)
assert len(action_values) == self.nb_players
assert action_values[0].shape == (self.batch_size, self.nb_actions)
if self.target_model_update >= 1 and self.step % self.target_model_update == 0:
self.update_target_models_hard()
return metrics
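# --- Usage sketch (illustrative only; not part of the original module) ---
# A minimal, hypothetical wiring of UBDDPGAgent, assuming a flat observation
# space and keras-rl's SequentialMemory. Layer sizes, shapes and the gym-style
# environment `env` are assumptions for demonstration, not values implied by
# the code above.
if __name__ == '__main__':
    from keras.models import Model
    from rl.memory import SequentialMemory

    nb_actions, nb_players, obs_dim = 2, 3, 8

    # Actor: one shared trunk, one tanh action head per player.
    obs_in = layers.Input(shape=(1, obs_dim), name='observation')
    h = layers.Dense(32, activation='relu')(layers.Flatten()(obs_in))
    actor = Model(inputs=obs_in,
                  outputs=[layers.Dense(nb_actions, activation='tanh')(h)
                           for _ in range(nb_players)])

    # Critic: the observation plus one action input per player, and one
    # Q-value output per player.
    action_ins = [layers.Input(shape=(nb_actions,), name='action_%d' % p)
                  for p in range(nb_players)]
    obs_flat = layers.Flatten()(obs_in)
    q_outs = []
    for a_in in action_ins:
        z = layers.Dense(32, activation='relu')(
            layers.Concatenate()([obs_flat, a_in]))
        q_outs.append(layers.Dense(1)(z))
    critic = Model(inputs=[obs_in] + action_ins, outputs=q_outs)

    agent = UBDDPGAgent(nb_actions=nb_actions, actor=actor, critic=critic,
                        nb_players=nb_players,
                        critic_action_inputs=action_ins,
                        memory=SequentialMemory(limit=100000, window_length=1))
    agent.compile(optimizers.Adam(lr=1e-3), metrics=['mae'])
    # agent.fit(env, nb_steps=50000)  # `env`: a gym-style environment (assumed)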
|
normal
|
{
"blob_id": "a2fe62b6bbb6b753ef6aec6f44758b8aceeeafe6",
"index": 9691,
"step-1": "<mask token>\n\n\nclass UBDDPGAgent(Agent):\n <mask token>\n <mask token>\n <mask token>\n\n def compile(self, optimizer, metrics=[]):\n metrics += [mean_q]\n if type(optimizer) in (list, tuple):\n if len(optimizer) != 2:\n raise ValueError(\n 'More than two optimizers provided. Please only provide a maximum of two optimizers, the first one for the actor and the second one for the critic.'\n )\n actor_optimizer, critic_optimizer = optimizer\n else:\n actor_optimizer = optimizer\n critic_optimizer = clone_optimizer(optimizer)\n if type(actor_optimizer) is str:\n actor_optimizer = optimizers.get(actor_optimizer)\n if type(critic_optimizer) is str:\n critic_optimizer = optimizers.get(critic_optimizer)\n assert actor_optimizer != critic_optimizer\n if len(metrics) == 2 and hasattr(metrics[0], '__len__') and hasattr(\n metrics[1], '__len__'):\n actor_metrics, critic_metrics = metrics\n else:\n actor_metrics = critic_metrics = metrics\n\n def clipped_error(y_true, y_pred):\n y_true = K.squeeze(y_true, axis=-1)\n y_pred = K.squeeze(y_pred, axis=-1)\n loss = K.mean(huber_loss(y_true, y_pred, self.delta_clip), axis=-1)\n return loss\n self.target_actor = clone_model(self.actor, self.custom_model_objects)\n self.target_actor.compile(optimizer='sgd', loss='mse')\n self.target_critic = clone_model(self.critic, self.custom_model_objects\n )\n self.target_critic.compile(optimizer='sgd', loss='mse')\n self.actor.compile(optimizer='sgd', loss='mse')\n if self.target_model_update < 1.0:\n critic_updates = get_soft_target_model_updates(self.\n target_critic, self.critic, self.target_model_update)\n critic_optimizer = AdditionalUpdatesOptimizer(critic_optimizer,\n critic_updates)\n self.critic.compile(optimizer=critic_optimizer, loss=[clipped_error\n ] * self.nb_players, metrics=critic_metrics)\n critic_inputs = []\n actor_inputs = []\n for i in self.critic.input:\n if i in self.critic_action_inputs:\n critic_inputs.append([])\n else:\n critic_inputs.append(i)\n actor_inputs.append(i)\n actor_outputs = self.actor(actor_inputs)\n if not isinstance(actor_outputs, (list,)):\n actor_outputs = [actor_outputs]\n assert len(actor_outputs) == self.nb_players\n for input_idx, actor_output in zip(self.critic_action_input_idxes,\n actor_outputs):\n critic_inputs[input_idx] = actor_output\n critic_outputs = self.critic(critic_inputs)\n if not isinstance(critic_outputs, (list,)):\n critic_outputs = [critic_outputs]\n assert len(critic_outputs) == self.nb_players\n actor_losses = [None] * self.nb_players\n for input_idx, critic_output in zip(self.critic_action_input_idxes,\n critic_outputs):\n actor_losses[input_idx] = -K.mean(critic_output)\n updates = actor_optimizer.get_updates(params=self.actor.\n trainable_weights, loss=actor_losses)\n if self.target_model_update < 1.0:\n updates += get_soft_target_model_updates(self.target_actor,\n self.actor, self.target_model_update)\n updates += self.actor.updates\n if K.backend() == 'tensorflow':\n self.actor_train_fn = K.function(actor_inputs + [K.\n learning_phase()], actor_outputs, updates=updates)\n else:\n if self.uses_learning_phase:\n actor_inputs += [K.learning_phase()]\n self.actor_train_fn = K.function(actor_inputs, actor_outputs,\n updates=updates)\n self.actor_optimizer = actor_optimizer\n self.compiled = True\n <mask token>\n <mask token>\n\n def update_target_models_hard(self):\n self.target_critic.set_weights(self.critic.get_weights())\n self.target_actor.set_weights(self.actor.get_weights())\n\n def reset_states(self):\n if self.random_process is not 
None:\n self.random_process.reset_states()\n self.recent_action = None\n self.recent_observation = None\n if self.compiled:\n self.actor.reset_states()\n self.critic.reset_states()\n self.target_actor.reset_states()\n self.target_critic.reset_states()\n\n def process_state_batch(self, batch):\n batch = np.array(batch)\n if self.processor is None:\n return batch\n return self.processor.process_state_batch(batch)\n\n def select_action(self, state):\n batch = self.process_state_batch([state])\n actions = self.actor.predict_on_batch(batch)\n if self.nb_players == 1:\n actions = [actions]\n assert len(actions) == self.nb_players\n assert actions[0].shape == (1, self.nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n state_batch_with_action = batch[:]\n else:\n state_batch_with_action = [batch]\n for action_idx, input_idx in enumerate(self.critic_action_input_idxes):\n state_batch_with_action.insert(input_idx, actions[action_idx])\n q_values = [qv.flatten() for qv in self.critic.predict_on_batch(\n state_batch_with_action)]\n assert q_values[0].shape == (1,)\n assert len(q_values) == self.nb_players\n action_best = actions[np.argmax(q_values)].flatten()\n assert action_best.shape == (self.nb_actions,)\n if self.training and self.random_process is not None:\n noise = self.random_process.sample()\n assert noise.shape == action_best.shape\n action_best += noise\n return action_best\n <mask token>\n\n @property\n def layers(self):\n return self.actor.layers[:] + self.critic.layers[:]\n\n @property\n def metrics_names(self):\n names = self.critic.metrics_names[:]\n if self.processor is not None:\n names += self.processor.metrics_names[:]\n return names\n\n def backward(self, reward, terminal=False):\n if self.step % self.memory_interval == 0:\n self.memory.append(self.recent_observation, self.recent_action,\n reward, terminal, training=self.training)\n metrics = [np.nan for _ in self.metrics_names]\n if not self.training:\n return metrics\n can_train_either = (self.step > self.nb_steps_warmup_critic or self\n .step > self.nb_steps_warmup_actor)\n if can_train_either and self.step % self.train_interval == 0:\n experiences = self.memory.sample(self.batch_size)\n assert len(experiences) == self.batch_size\n state0_batch = []\n reward_batch = []\n action_batch = []\n terminal1_batch = []\n state1_batch = []\n for e in experiences:\n state0_batch.append(e.state0)\n state1_batch.append(e.state1)\n reward_batch.append(e.reward)\n action_batch.append(e.action)\n terminal1_batch.append(0.0 if e.terminal1 else 1.0)\n state0_batch = self.process_state_batch(state0_batch)\n state1_batch = self.process_state_batch(state1_batch)\n terminal1_batch = np.array(terminal1_batch)\n reward_batch = np.array(reward_batch)\n action_batch = np.array(action_batch)\n assert reward_batch.shape == (self.batch_size,)\n assert terminal1_batch.shape == reward_batch.shape\n assert action_batch.shape == (self.batch_size, self.nb_actions)\n if self.step > self.nb_steps_warmup_critic:\n target_actions = self.target_actor.predict_on_batch(\n state1_batch)\n if not isinstance(target_actions, (list,)):\n target_actions = [target_actions]\n assert len(target_actions) == self.nb_players\n assert target_actions[0].shape == (self.batch_size, self.\n nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n state1_batch_with_action = state1_batch[:]\n else:\n state1_batch_with_action = [state1_batch]\n for action_idx, input_idx in enumerate(self.\n critic_action_input_idxes):\n 
state1_batch_with_action.insert(input_idx,\n target_actions[action_idx])\n target_q_values = self.target_critic.predict_on_batch(\n state1_batch_with_action)\n if not isinstance(target_q_values, (list,)):\n target_q_values = [target_q_values]\n target_q_values = [tqv.flatten() for tqv in target_q_values]\n assert target_q_values[0].shape == reward_batch.shape\n assert len(target_q_values) == self.nb_players\n discounted_reward_batch = [(self.gamma * terminal1_batch *\n tqv) for tqv in target_q_values]\n assert discounted_reward_batch[0].shape == reward_batch.shape\n targets = [(reward_batch + drb) for drb in\n discounted_reward_batch]\n assert targets[0].shape == reward_batch.shape\n assert len(targets) == self.nb_players\n if len(self.critic.inputs) > self.nb_players + 1:\n state0_batch_with_action = state0_batch[:]\n else:\n state0_batch_with_action = [state0_batch]\n for input_idx in self.critic_action_input_idxes:\n state0_batch_with_action.insert(input_idx, action_batch)\n metrics = self.critic.train_on_batch(state0_batch_with_action,\n targets)\n if self.processor is not None:\n metrics += self.processor.metrics\n if self.step > self.nb_steps_warmup_actor:\n if len(self.actor.inputs) >= 2:\n inputs = state0_batch[:]\n else:\n inputs = [state0_batch]\n if self.uses_learning_phase:\n inputs += [self.training]\n action_values = self.actor_train_fn(inputs)\n assert len(action_values) == self.nb_players\n assert action_values[0].shape == (self.batch_size, self.\n nb_actions)\n if (self.target_model_update >= 1 and self.step % self.\n target_model_update == 0):\n self.update_target_models_hard()\n return metrics\n",
"step-2": "<mask token>\n\n\nclass UBDDPGAgent(Agent):\n <mask token>\n\n def __init__(self, nb_actions, actor, critic, nb_players,\n critic_action_inputs, memory, gamma=0.99, batch_size=32,\n nb_steps_warmup_critic=1000, nb_steps_warmup_actor=1000,\n train_interval=1, memory_interval=1, delta_range=None, delta_clip=\n np.inf, random_process=None, custom_model_objects={},\n target_model_update=0.001, **kwargs):\n assert len(critic_action_inputs) == nb_players\n if hasattr(actor.output, '__len__') and len(actor.output\n ) != nb_players:\n raise ValueError((\n 'Actor \"{}\" does not have the right number of ',\n 'outputs. DDPG expects an actor that has {} outputs.').\n format(actor, nb_players))\n for critic_action_input in critic_action_inputs:\n if critic_action_input not in critic.input:\n raise ValueError(\n 'Critic \"{}\" does not have designated action input \"{}\".'\n .format(critic, critic_action_input))\n if not hasattr(critic.input, '__len__') or len(critic.input) < 2:\n raise ValueError(\n 'Critic \"{}\" does not have enough inputs. The critic must have at least two inputs, one for the action and one for the observation.'\n .format(critic))\n super(UBDDPGAgent, self).__init__(**kwargs)\n if target_model_update < 0:\n raise ValueError('`target_model_update` must be >= 0.')\n elif target_model_update >= 1:\n target_model_update = int(target_model_update)\n else:\n target_model_update = float(target_model_update)\n if delta_range is not None:\n warnings.warn(\n \"`delta_range` is deprecated. Please use `delta_clip` instead, which takes a single scalar. For now we're falling back to `delta_range[1] = {}`\"\n .format(delta_range[1]))\n delta_clip = delta_range[1]\n self.nb_actions = nb_actions\n self.nb_steps_warmup_actor = nb_steps_warmup_actor\n self.nb_steps_warmup_critic = nb_steps_warmup_critic\n self.random_process = random_process\n self.delta_clip = delta_clip\n self.gamma = gamma\n self.target_model_update = target_model_update\n self.batch_size = batch_size\n self.train_interval = train_interval\n self.memory_interval = memory_interval\n self.custom_model_objects = custom_model_objects\n self.actor = actor\n self.critic = critic\n self.nb_players = nb_players\n self.critic_action_inputs = critic_action_inputs\n self.critic_action_input_idxes = [self.critic.input.index(\n critic_action_input) for critic_action_input in\n critic_action_inputs]\n self.memory = memory\n self.compiled = False\n self.reset_states()\n <mask token>\n\n def compile(self, optimizer, metrics=[]):\n metrics += [mean_q]\n if type(optimizer) in (list, tuple):\n if len(optimizer) != 2:\n raise ValueError(\n 'More than two optimizers provided. 
Please only provide a maximum of two optimizers, the first one for the actor and the second one for the critic.'\n )\n actor_optimizer, critic_optimizer = optimizer\n else:\n actor_optimizer = optimizer\n critic_optimizer = clone_optimizer(optimizer)\n if type(actor_optimizer) is str:\n actor_optimizer = optimizers.get(actor_optimizer)\n if type(critic_optimizer) is str:\n critic_optimizer = optimizers.get(critic_optimizer)\n assert actor_optimizer != critic_optimizer\n if len(metrics) == 2 and hasattr(metrics[0], '__len__') and hasattr(\n metrics[1], '__len__'):\n actor_metrics, critic_metrics = metrics\n else:\n actor_metrics = critic_metrics = metrics\n\n def clipped_error(y_true, y_pred):\n y_true = K.squeeze(y_true, axis=-1)\n y_pred = K.squeeze(y_pred, axis=-1)\n loss = K.mean(huber_loss(y_true, y_pred, self.delta_clip), axis=-1)\n return loss\n self.target_actor = clone_model(self.actor, self.custom_model_objects)\n self.target_actor.compile(optimizer='sgd', loss='mse')\n self.target_critic = clone_model(self.critic, self.custom_model_objects\n )\n self.target_critic.compile(optimizer='sgd', loss='mse')\n self.actor.compile(optimizer='sgd', loss='mse')\n if self.target_model_update < 1.0:\n critic_updates = get_soft_target_model_updates(self.\n target_critic, self.critic, self.target_model_update)\n critic_optimizer = AdditionalUpdatesOptimizer(critic_optimizer,\n critic_updates)\n self.critic.compile(optimizer=critic_optimizer, loss=[clipped_error\n ] * self.nb_players, metrics=critic_metrics)\n critic_inputs = []\n actor_inputs = []\n for i in self.critic.input:\n if i in self.critic_action_inputs:\n critic_inputs.append([])\n else:\n critic_inputs.append(i)\n actor_inputs.append(i)\n actor_outputs = self.actor(actor_inputs)\n if not isinstance(actor_outputs, (list,)):\n actor_outputs = [actor_outputs]\n assert len(actor_outputs) == self.nb_players\n for input_idx, actor_output in zip(self.critic_action_input_idxes,\n actor_outputs):\n critic_inputs[input_idx] = actor_output\n critic_outputs = self.critic(critic_inputs)\n if not isinstance(critic_outputs, (list,)):\n critic_outputs = [critic_outputs]\n assert len(critic_outputs) == self.nb_players\n actor_losses = [None] * self.nb_players\n for input_idx, critic_output in zip(self.critic_action_input_idxes,\n critic_outputs):\n actor_losses[input_idx] = -K.mean(critic_output)\n updates = actor_optimizer.get_updates(params=self.actor.\n trainable_weights, loss=actor_losses)\n if self.target_model_update < 1.0:\n updates += get_soft_target_model_updates(self.target_actor,\n self.actor, self.target_model_update)\n updates += self.actor.updates\n if K.backend() == 'tensorflow':\n self.actor_train_fn = K.function(actor_inputs + [K.\n learning_phase()], actor_outputs, updates=updates)\n else:\n if self.uses_learning_phase:\n actor_inputs += [K.learning_phase()]\n self.actor_train_fn = K.function(actor_inputs, actor_outputs,\n updates=updates)\n self.actor_optimizer = actor_optimizer\n self.compiled = True\n\n def load_weights(self, filepath):\n filename, extension = os.path.splitext(filepath)\n actor_filepath = filename + '_actor' + extension\n critic_filepath = filename + '_critic' + extension\n self.actor.load_weights(actor_filepath)\n self.critic.load_weights(critic_filepath)\n self.update_target_models_hard()\n <mask token>\n\n def update_target_models_hard(self):\n self.target_critic.set_weights(self.critic.get_weights())\n self.target_actor.set_weights(self.actor.get_weights())\n\n def reset_states(self):\n if self.random_process 
is not None:\n self.random_process.reset_states()\n self.recent_action = None\n self.recent_observation = None\n if self.compiled:\n self.actor.reset_states()\n self.critic.reset_states()\n self.target_actor.reset_states()\n self.target_critic.reset_states()\n\n def process_state_batch(self, batch):\n batch = np.array(batch)\n if self.processor is None:\n return batch\n return self.processor.process_state_batch(batch)\n\n def select_action(self, state):\n batch = self.process_state_batch([state])\n actions = self.actor.predict_on_batch(batch)\n if self.nb_players == 1:\n actions = [actions]\n assert len(actions) == self.nb_players\n assert actions[0].shape == (1, self.nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n state_batch_with_action = batch[:]\n else:\n state_batch_with_action = [batch]\n for action_idx, input_idx in enumerate(self.critic_action_input_idxes):\n state_batch_with_action.insert(input_idx, actions[action_idx])\n q_values = [qv.flatten() for qv in self.critic.predict_on_batch(\n state_batch_with_action)]\n assert q_values[0].shape == (1,)\n assert len(q_values) == self.nb_players\n action_best = actions[np.argmax(q_values)].flatten()\n assert action_best.shape == (self.nb_actions,)\n if self.training and self.random_process is not None:\n noise = self.random_process.sample()\n assert noise.shape == action_best.shape\n action_best += noise\n return action_best\n\n def forward(self, observation):\n state = self.memory.get_recent_state(observation)\n action = self.select_action(state)\n self.recent_observation = observation\n self.recent_action = action\n return action\n\n @property\n def layers(self):\n return self.actor.layers[:] + self.critic.layers[:]\n\n @property\n def metrics_names(self):\n names = self.critic.metrics_names[:]\n if self.processor is not None:\n names += self.processor.metrics_names[:]\n return names\n\n def backward(self, reward, terminal=False):\n if self.step % self.memory_interval == 0:\n self.memory.append(self.recent_observation, self.recent_action,\n reward, terminal, training=self.training)\n metrics = [np.nan for _ in self.metrics_names]\n if not self.training:\n return metrics\n can_train_either = (self.step > self.nb_steps_warmup_critic or self\n .step > self.nb_steps_warmup_actor)\n if can_train_either and self.step % self.train_interval == 0:\n experiences = self.memory.sample(self.batch_size)\n assert len(experiences) == self.batch_size\n state0_batch = []\n reward_batch = []\n action_batch = []\n terminal1_batch = []\n state1_batch = []\n for e in experiences:\n state0_batch.append(e.state0)\n state1_batch.append(e.state1)\n reward_batch.append(e.reward)\n action_batch.append(e.action)\n terminal1_batch.append(0.0 if e.terminal1 else 1.0)\n state0_batch = self.process_state_batch(state0_batch)\n state1_batch = self.process_state_batch(state1_batch)\n terminal1_batch = np.array(terminal1_batch)\n reward_batch = np.array(reward_batch)\n action_batch = np.array(action_batch)\n assert reward_batch.shape == (self.batch_size,)\n assert terminal1_batch.shape == reward_batch.shape\n assert action_batch.shape == (self.batch_size, self.nb_actions)\n if self.step > self.nb_steps_warmup_critic:\n target_actions = self.target_actor.predict_on_batch(\n state1_batch)\n if not isinstance(target_actions, (list,)):\n target_actions = [target_actions]\n assert len(target_actions) == self.nb_players\n assert target_actions[0].shape == (self.batch_size, self.\n nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n 
state1_batch_with_action = state1_batch[:]\n else:\n state1_batch_with_action = [state1_batch]\n for action_idx, input_idx in enumerate(self.\n critic_action_input_idxes):\n state1_batch_with_action.insert(input_idx,\n target_actions[action_idx])\n target_q_values = self.target_critic.predict_on_batch(\n state1_batch_with_action)\n if not isinstance(target_q_values, (list,)):\n target_q_values = [target_q_values]\n target_q_values = [tqv.flatten() for tqv in target_q_values]\n assert target_q_values[0].shape == reward_batch.shape\n assert len(target_q_values) == self.nb_players\n discounted_reward_batch = [(self.gamma * terminal1_batch *\n tqv) for tqv in target_q_values]\n assert discounted_reward_batch[0].shape == reward_batch.shape\n targets = [(reward_batch + drb) for drb in\n discounted_reward_batch]\n assert targets[0].shape == reward_batch.shape\n assert len(targets) == self.nb_players\n if len(self.critic.inputs) > self.nb_players + 1:\n state0_batch_with_action = state0_batch[:]\n else:\n state0_batch_with_action = [state0_batch]\n for input_idx in self.critic_action_input_idxes:\n state0_batch_with_action.insert(input_idx, action_batch)\n metrics = self.critic.train_on_batch(state0_batch_with_action,\n targets)\n if self.processor is not None:\n metrics += self.processor.metrics\n if self.step > self.nb_steps_warmup_actor:\n if len(self.actor.inputs) >= 2:\n inputs = state0_batch[:]\n else:\n inputs = [state0_batch]\n if self.uses_learning_phase:\n inputs += [self.training]\n action_values = self.actor_train_fn(inputs)\n assert len(action_values) == self.nb_players\n assert action_values[0].shape == (self.batch_size, self.\n nb_actions)\n if (self.target_model_update >= 1 and self.step % self.\n target_model_update == 0):\n self.update_target_models_hard()\n return metrics\n",
"step-3": "<mask token>\n\n\nclass UBDDPGAgent(Agent):\n \"\"\"Write me\n \"\"\"\n\n def __init__(self, nb_actions, actor, critic, nb_players,\n critic_action_inputs, memory, gamma=0.99, batch_size=32,\n nb_steps_warmup_critic=1000, nb_steps_warmup_actor=1000,\n train_interval=1, memory_interval=1, delta_range=None, delta_clip=\n np.inf, random_process=None, custom_model_objects={},\n target_model_update=0.001, **kwargs):\n assert len(critic_action_inputs) == nb_players\n if hasattr(actor.output, '__len__') and len(actor.output\n ) != nb_players:\n raise ValueError((\n 'Actor \"{}\" does not have the right number of ',\n 'outputs. DDPG expects an actor that has {} outputs.').\n format(actor, nb_players))\n for critic_action_input in critic_action_inputs:\n if critic_action_input not in critic.input:\n raise ValueError(\n 'Critic \"{}\" does not have designated action input \"{}\".'\n .format(critic, critic_action_input))\n if not hasattr(critic.input, '__len__') or len(critic.input) < 2:\n raise ValueError(\n 'Critic \"{}\" does not have enough inputs. The critic must have at least two inputs, one for the action and one for the observation.'\n .format(critic))\n super(UBDDPGAgent, self).__init__(**kwargs)\n if target_model_update < 0:\n raise ValueError('`target_model_update` must be >= 0.')\n elif target_model_update >= 1:\n target_model_update = int(target_model_update)\n else:\n target_model_update = float(target_model_update)\n if delta_range is not None:\n warnings.warn(\n \"`delta_range` is deprecated. Please use `delta_clip` instead, which takes a single scalar. For now we're falling back to `delta_range[1] = {}`\"\n .format(delta_range[1]))\n delta_clip = delta_range[1]\n self.nb_actions = nb_actions\n self.nb_steps_warmup_actor = nb_steps_warmup_actor\n self.nb_steps_warmup_critic = nb_steps_warmup_critic\n self.random_process = random_process\n self.delta_clip = delta_clip\n self.gamma = gamma\n self.target_model_update = target_model_update\n self.batch_size = batch_size\n self.train_interval = train_interval\n self.memory_interval = memory_interval\n self.custom_model_objects = custom_model_objects\n self.actor = actor\n self.critic = critic\n self.nb_players = nb_players\n self.critic_action_inputs = critic_action_inputs\n self.critic_action_input_idxes = [self.critic.input.index(\n critic_action_input) for critic_action_input in\n critic_action_inputs]\n self.memory = memory\n self.compiled = False\n self.reset_states()\n\n @property\n def uses_learning_phase(self):\n return (self.actor.uses_learning_phase or self.critic.\n uses_learning_phase)\n\n def compile(self, optimizer, metrics=[]):\n metrics += [mean_q]\n if type(optimizer) in (list, tuple):\n if len(optimizer) != 2:\n raise ValueError(\n 'More than two optimizers provided. 
Please only provide a maximum of two optimizers, the first one for the actor and the second one for the critic.'\n )\n actor_optimizer, critic_optimizer = optimizer\n else:\n actor_optimizer = optimizer\n critic_optimizer = clone_optimizer(optimizer)\n if type(actor_optimizer) is str:\n actor_optimizer = optimizers.get(actor_optimizer)\n if type(critic_optimizer) is str:\n critic_optimizer = optimizers.get(critic_optimizer)\n assert actor_optimizer != critic_optimizer\n if len(metrics) == 2 and hasattr(metrics[0], '__len__') and hasattr(\n metrics[1], '__len__'):\n actor_metrics, critic_metrics = metrics\n else:\n actor_metrics = critic_metrics = metrics\n\n def clipped_error(y_true, y_pred):\n y_true = K.squeeze(y_true, axis=-1)\n y_pred = K.squeeze(y_pred, axis=-1)\n loss = K.mean(huber_loss(y_true, y_pred, self.delta_clip), axis=-1)\n return loss\n self.target_actor = clone_model(self.actor, self.custom_model_objects)\n self.target_actor.compile(optimizer='sgd', loss='mse')\n self.target_critic = clone_model(self.critic, self.custom_model_objects\n )\n self.target_critic.compile(optimizer='sgd', loss='mse')\n self.actor.compile(optimizer='sgd', loss='mse')\n if self.target_model_update < 1.0:\n critic_updates = get_soft_target_model_updates(self.\n target_critic, self.critic, self.target_model_update)\n critic_optimizer = AdditionalUpdatesOptimizer(critic_optimizer,\n critic_updates)\n self.critic.compile(optimizer=critic_optimizer, loss=[clipped_error\n ] * self.nb_players, metrics=critic_metrics)\n critic_inputs = []\n actor_inputs = []\n for i in self.critic.input:\n if i in self.critic_action_inputs:\n critic_inputs.append([])\n else:\n critic_inputs.append(i)\n actor_inputs.append(i)\n actor_outputs = self.actor(actor_inputs)\n if not isinstance(actor_outputs, (list,)):\n actor_outputs = [actor_outputs]\n assert len(actor_outputs) == self.nb_players\n for input_idx, actor_output in zip(self.critic_action_input_idxes,\n actor_outputs):\n critic_inputs[input_idx] = actor_output\n critic_outputs = self.critic(critic_inputs)\n if not isinstance(critic_outputs, (list,)):\n critic_outputs = [critic_outputs]\n assert len(critic_outputs) == self.nb_players\n actor_losses = [None] * self.nb_players\n for input_idx, critic_output in zip(self.critic_action_input_idxes,\n critic_outputs):\n actor_losses[input_idx] = -K.mean(critic_output)\n updates = actor_optimizer.get_updates(params=self.actor.\n trainable_weights, loss=actor_losses)\n if self.target_model_update < 1.0:\n updates += get_soft_target_model_updates(self.target_actor,\n self.actor, self.target_model_update)\n updates += self.actor.updates\n if K.backend() == 'tensorflow':\n self.actor_train_fn = K.function(actor_inputs + [K.\n learning_phase()], actor_outputs, updates=updates)\n else:\n if self.uses_learning_phase:\n actor_inputs += [K.learning_phase()]\n self.actor_train_fn = K.function(actor_inputs, actor_outputs,\n updates=updates)\n self.actor_optimizer = actor_optimizer\n self.compiled = True\n\n def load_weights(self, filepath):\n filename, extension = os.path.splitext(filepath)\n actor_filepath = filename + '_actor' + extension\n critic_filepath = filename + '_critic' + extension\n self.actor.load_weights(actor_filepath)\n self.critic.load_weights(critic_filepath)\n self.update_target_models_hard()\n\n def save_weights(self, filepath, overwrite=False):\n filename, extension = os.path.splitext(filepath)\n actor_filepath = filename + '_actor' + extension\n critic_filepath = filename + '_critic' + extension\n 
self.actor.save_weights(actor_filepath, overwrite=overwrite)\n self.critic.save_weights(critic_filepath, overwrite=overwrite)\n\n def update_target_models_hard(self):\n self.target_critic.set_weights(self.critic.get_weights())\n self.target_actor.set_weights(self.actor.get_weights())\n\n def reset_states(self):\n if self.random_process is not None:\n self.random_process.reset_states()\n self.recent_action = None\n self.recent_observation = None\n if self.compiled:\n self.actor.reset_states()\n self.critic.reset_states()\n self.target_actor.reset_states()\n self.target_critic.reset_states()\n\n def process_state_batch(self, batch):\n batch = np.array(batch)\n if self.processor is None:\n return batch\n return self.processor.process_state_batch(batch)\n\n def select_action(self, state):\n batch = self.process_state_batch([state])\n actions = self.actor.predict_on_batch(batch)\n if self.nb_players == 1:\n actions = [actions]\n assert len(actions) == self.nb_players\n assert actions[0].shape == (1, self.nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n state_batch_with_action = batch[:]\n else:\n state_batch_with_action = [batch]\n for action_idx, input_idx in enumerate(self.critic_action_input_idxes):\n state_batch_with_action.insert(input_idx, actions[action_idx])\n q_values = [qv.flatten() for qv in self.critic.predict_on_batch(\n state_batch_with_action)]\n assert q_values[0].shape == (1,)\n assert len(q_values) == self.nb_players\n action_best = actions[np.argmax(q_values)].flatten()\n assert action_best.shape == (self.nb_actions,)\n if self.training and self.random_process is not None:\n noise = self.random_process.sample()\n assert noise.shape == action_best.shape\n action_best += noise\n return action_best\n\n def forward(self, observation):\n state = self.memory.get_recent_state(observation)\n action = self.select_action(state)\n self.recent_observation = observation\n self.recent_action = action\n return action\n\n @property\n def layers(self):\n return self.actor.layers[:] + self.critic.layers[:]\n\n @property\n def metrics_names(self):\n names = self.critic.metrics_names[:]\n if self.processor is not None:\n names += self.processor.metrics_names[:]\n return names\n\n def backward(self, reward, terminal=False):\n if self.step % self.memory_interval == 0:\n self.memory.append(self.recent_observation, self.recent_action,\n reward, terminal, training=self.training)\n metrics = [np.nan for _ in self.metrics_names]\n if not self.training:\n return metrics\n can_train_either = (self.step > self.nb_steps_warmup_critic or self\n .step > self.nb_steps_warmup_actor)\n if can_train_either and self.step % self.train_interval == 0:\n experiences = self.memory.sample(self.batch_size)\n assert len(experiences) == self.batch_size\n state0_batch = []\n reward_batch = []\n action_batch = []\n terminal1_batch = []\n state1_batch = []\n for e in experiences:\n state0_batch.append(e.state0)\n state1_batch.append(e.state1)\n reward_batch.append(e.reward)\n action_batch.append(e.action)\n terminal1_batch.append(0.0 if e.terminal1 else 1.0)\n state0_batch = self.process_state_batch(state0_batch)\n state1_batch = self.process_state_batch(state1_batch)\n terminal1_batch = np.array(terminal1_batch)\n reward_batch = np.array(reward_batch)\n action_batch = np.array(action_batch)\n assert reward_batch.shape == (self.batch_size,)\n assert terminal1_batch.shape == reward_batch.shape\n assert action_batch.shape == (self.batch_size, self.nb_actions)\n if self.step > self.nb_steps_warmup_critic:\n 
target_actions = self.target_actor.predict_on_batch(\n state1_batch)\n if not isinstance(target_actions, (list,)):\n target_actions = [target_actions]\n assert len(target_actions) == self.nb_players\n assert target_actions[0].shape == (self.batch_size, self.\n nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n state1_batch_with_action = state1_batch[:]\n else:\n state1_batch_with_action = [state1_batch]\n for action_idx, input_idx in enumerate(self.\n critic_action_input_idxes):\n state1_batch_with_action.insert(input_idx,\n target_actions[action_idx])\n target_q_values = self.target_critic.predict_on_batch(\n state1_batch_with_action)\n if not isinstance(target_q_values, (list,)):\n target_q_values = [target_q_values]\n target_q_values = [tqv.flatten() for tqv in target_q_values]\n assert target_q_values[0].shape == reward_batch.shape\n assert len(target_q_values) == self.nb_players\n discounted_reward_batch = [(self.gamma * terminal1_batch *\n tqv) for tqv in target_q_values]\n assert discounted_reward_batch[0].shape == reward_batch.shape\n targets = [(reward_batch + drb) for drb in\n discounted_reward_batch]\n assert targets[0].shape == reward_batch.shape\n assert len(targets) == self.nb_players\n if len(self.critic.inputs) > self.nb_players + 1:\n state0_batch_with_action = state0_batch[:]\n else:\n state0_batch_with_action = [state0_batch]\n for input_idx in self.critic_action_input_idxes:\n state0_batch_with_action.insert(input_idx, action_batch)\n metrics = self.critic.train_on_batch(state0_batch_with_action,\n targets)\n if self.processor is not None:\n metrics += self.processor.metrics\n if self.step > self.nb_steps_warmup_actor:\n if len(self.actor.inputs) >= 2:\n inputs = state0_batch[:]\n else:\n inputs = [state0_batch]\n if self.uses_learning_phase:\n inputs += [self.training]\n action_values = self.actor_train_fn(inputs)\n assert len(action_values) == self.nb_players\n assert action_values[0].shape == (self.batch_size, self.\n nb_actions)\n if (self.target_model_update >= 1 and self.step % self.\n target_model_update == 0):\n self.update_target_models_hard()\n return metrics\n",
"step-4": "from __future__ import division\nfrom collections import deque\nimport os\nimport warnings\nimport numpy as np\nimport keras.backend as K\nimport keras.layers as layers\nimport keras.optimizers as optimizers\nfrom rl.core import Agent\nfrom rl.util import *\n\n\ndef mean_q(y_true, y_pred):\n return K.mean(K.max(y_pred, axis=-1))\n\n\nclass UBDDPGAgent(Agent):\n \"\"\"Write me\n \"\"\"\n\n def __init__(self, nb_actions, actor, critic, nb_players,\n critic_action_inputs, memory, gamma=0.99, batch_size=32,\n nb_steps_warmup_critic=1000, nb_steps_warmup_actor=1000,\n train_interval=1, memory_interval=1, delta_range=None, delta_clip=\n np.inf, random_process=None, custom_model_objects={},\n target_model_update=0.001, **kwargs):\n assert len(critic_action_inputs) == nb_players\n if hasattr(actor.output, '__len__') and len(actor.output\n ) != nb_players:\n raise ValueError((\n 'Actor \"{}\" does not have the right number of ',\n 'outputs. DDPG expects an actor that has {} outputs.').\n format(actor, nb_players))\n for critic_action_input in critic_action_inputs:\n if critic_action_input not in critic.input:\n raise ValueError(\n 'Critic \"{}\" does not have designated action input \"{}\".'\n .format(critic, critic_action_input))\n if not hasattr(critic.input, '__len__') or len(critic.input) < 2:\n raise ValueError(\n 'Critic \"{}\" does not have enough inputs. The critic must have at least two inputs, one for the action and one for the observation.'\n .format(critic))\n super(UBDDPGAgent, self).__init__(**kwargs)\n if target_model_update < 0:\n raise ValueError('`target_model_update` must be >= 0.')\n elif target_model_update >= 1:\n target_model_update = int(target_model_update)\n else:\n target_model_update = float(target_model_update)\n if delta_range is not None:\n warnings.warn(\n \"`delta_range` is deprecated. Please use `delta_clip` instead, which takes a single scalar. For now we're falling back to `delta_range[1] = {}`\"\n .format(delta_range[1]))\n delta_clip = delta_range[1]\n self.nb_actions = nb_actions\n self.nb_steps_warmup_actor = nb_steps_warmup_actor\n self.nb_steps_warmup_critic = nb_steps_warmup_critic\n self.random_process = random_process\n self.delta_clip = delta_clip\n self.gamma = gamma\n self.target_model_update = target_model_update\n self.batch_size = batch_size\n self.train_interval = train_interval\n self.memory_interval = memory_interval\n self.custom_model_objects = custom_model_objects\n self.actor = actor\n self.critic = critic\n self.nb_players = nb_players\n self.critic_action_inputs = critic_action_inputs\n self.critic_action_input_idxes = [self.critic.input.index(\n critic_action_input) for critic_action_input in\n critic_action_inputs]\n self.memory = memory\n self.compiled = False\n self.reset_states()\n\n @property\n def uses_learning_phase(self):\n return (self.actor.uses_learning_phase or self.critic.\n uses_learning_phase)\n\n def compile(self, optimizer, metrics=[]):\n metrics += [mean_q]\n if type(optimizer) in (list, tuple):\n if len(optimizer) != 2:\n raise ValueError(\n 'More than two optimizers provided. 
Please only provide a maximum of two optimizers, the first one for the actor and the second one for the critic.'\n )\n actor_optimizer, critic_optimizer = optimizer\n else:\n actor_optimizer = optimizer\n critic_optimizer = clone_optimizer(optimizer)\n if type(actor_optimizer) is str:\n actor_optimizer = optimizers.get(actor_optimizer)\n if type(critic_optimizer) is str:\n critic_optimizer = optimizers.get(critic_optimizer)\n assert actor_optimizer != critic_optimizer\n if len(metrics) == 2 and hasattr(metrics[0], '__len__') and hasattr(\n metrics[1], '__len__'):\n actor_metrics, critic_metrics = metrics\n else:\n actor_metrics = critic_metrics = metrics\n\n def clipped_error(y_true, y_pred):\n y_true = K.squeeze(y_true, axis=-1)\n y_pred = K.squeeze(y_pred, axis=-1)\n loss = K.mean(huber_loss(y_true, y_pred, self.delta_clip), axis=-1)\n return loss\n self.target_actor = clone_model(self.actor, self.custom_model_objects)\n self.target_actor.compile(optimizer='sgd', loss='mse')\n self.target_critic = clone_model(self.critic, self.custom_model_objects\n )\n self.target_critic.compile(optimizer='sgd', loss='mse')\n self.actor.compile(optimizer='sgd', loss='mse')\n if self.target_model_update < 1.0:\n critic_updates = get_soft_target_model_updates(self.\n target_critic, self.critic, self.target_model_update)\n critic_optimizer = AdditionalUpdatesOptimizer(critic_optimizer,\n critic_updates)\n self.critic.compile(optimizer=critic_optimizer, loss=[clipped_error\n ] * self.nb_players, metrics=critic_metrics)\n critic_inputs = []\n actor_inputs = []\n for i in self.critic.input:\n if i in self.critic_action_inputs:\n critic_inputs.append([])\n else:\n critic_inputs.append(i)\n actor_inputs.append(i)\n actor_outputs = self.actor(actor_inputs)\n if not isinstance(actor_outputs, (list,)):\n actor_outputs = [actor_outputs]\n assert len(actor_outputs) == self.nb_players\n for input_idx, actor_output in zip(self.critic_action_input_idxes,\n actor_outputs):\n critic_inputs[input_idx] = actor_output\n critic_outputs = self.critic(critic_inputs)\n if not isinstance(critic_outputs, (list,)):\n critic_outputs = [critic_outputs]\n assert len(critic_outputs) == self.nb_players\n actor_losses = [None] * self.nb_players\n for input_idx, critic_output in zip(self.critic_action_input_idxes,\n critic_outputs):\n actor_losses[input_idx] = -K.mean(critic_output)\n updates = actor_optimizer.get_updates(params=self.actor.\n trainable_weights, loss=actor_losses)\n if self.target_model_update < 1.0:\n updates += get_soft_target_model_updates(self.target_actor,\n self.actor, self.target_model_update)\n updates += self.actor.updates\n if K.backend() == 'tensorflow':\n self.actor_train_fn = K.function(actor_inputs + [K.\n learning_phase()], actor_outputs, updates=updates)\n else:\n if self.uses_learning_phase:\n actor_inputs += [K.learning_phase()]\n self.actor_train_fn = K.function(actor_inputs, actor_outputs,\n updates=updates)\n self.actor_optimizer = actor_optimizer\n self.compiled = True\n\n def load_weights(self, filepath):\n filename, extension = os.path.splitext(filepath)\n actor_filepath = filename + '_actor' + extension\n critic_filepath = filename + '_critic' + extension\n self.actor.load_weights(actor_filepath)\n self.critic.load_weights(critic_filepath)\n self.update_target_models_hard()\n\n def save_weights(self, filepath, overwrite=False):\n filename, extension = os.path.splitext(filepath)\n actor_filepath = filename + '_actor' + extension\n critic_filepath = filename + '_critic' + extension\n 
self.actor.save_weights(actor_filepath, overwrite=overwrite)\n self.critic.save_weights(critic_filepath, overwrite=overwrite)\n\n def update_target_models_hard(self):\n self.target_critic.set_weights(self.critic.get_weights())\n self.target_actor.set_weights(self.actor.get_weights())\n\n def reset_states(self):\n if self.random_process is not None:\n self.random_process.reset_states()\n self.recent_action = None\n self.recent_observation = None\n if self.compiled:\n self.actor.reset_states()\n self.critic.reset_states()\n self.target_actor.reset_states()\n self.target_critic.reset_states()\n\n def process_state_batch(self, batch):\n batch = np.array(batch)\n if self.processor is None:\n return batch\n return self.processor.process_state_batch(batch)\n\n def select_action(self, state):\n batch = self.process_state_batch([state])\n actions = self.actor.predict_on_batch(batch)\n if self.nb_players == 1:\n actions = [actions]\n assert len(actions) == self.nb_players\n assert actions[0].shape == (1, self.nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n state_batch_with_action = batch[:]\n else:\n state_batch_with_action = [batch]\n for action_idx, input_idx in enumerate(self.critic_action_input_idxes):\n state_batch_with_action.insert(input_idx, actions[action_idx])\n q_values = [qv.flatten() for qv in self.critic.predict_on_batch(\n state_batch_with_action)]\n assert q_values[0].shape == (1,)\n assert len(q_values) == self.nb_players\n action_best = actions[np.argmax(q_values)].flatten()\n assert action_best.shape == (self.nb_actions,)\n if self.training and self.random_process is not None:\n noise = self.random_process.sample()\n assert noise.shape == action_best.shape\n action_best += noise\n return action_best\n\n def forward(self, observation):\n state = self.memory.get_recent_state(observation)\n action = self.select_action(state)\n self.recent_observation = observation\n self.recent_action = action\n return action\n\n @property\n def layers(self):\n return self.actor.layers[:] + self.critic.layers[:]\n\n @property\n def metrics_names(self):\n names = self.critic.metrics_names[:]\n if self.processor is not None:\n names += self.processor.metrics_names[:]\n return names\n\n def backward(self, reward, terminal=False):\n if self.step % self.memory_interval == 0:\n self.memory.append(self.recent_observation, self.recent_action,\n reward, terminal, training=self.training)\n metrics = [np.nan for _ in self.metrics_names]\n if not self.training:\n return metrics\n can_train_either = (self.step > self.nb_steps_warmup_critic or self\n .step > self.nb_steps_warmup_actor)\n if can_train_either and self.step % self.train_interval == 0:\n experiences = self.memory.sample(self.batch_size)\n assert len(experiences) == self.batch_size\n state0_batch = []\n reward_batch = []\n action_batch = []\n terminal1_batch = []\n state1_batch = []\n for e in experiences:\n state0_batch.append(e.state0)\n state1_batch.append(e.state1)\n reward_batch.append(e.reward)\n action_batch.append(e.action)\n terminal1_batch.append(0.0 if e.terminal1 else 1.0)\n state0_batch = self.process_state_batch(state0_batch)\n state1_batch = self.process_state_batch(state1_batch)\n terminal1_batch = np.array(terminal1_batch)\n reward_batch = np.array(reward_batch)\n action_batch = np.array(action_batch)\n assert reward_batch.shape == (self.batch_size,)\n assert terminal1_batch.shape == reward_batch.shape\n assert action_batch.shape == (self.batch_size, self.nb_actions)\n if self.step > self.nb_steps_warmup_critic:\n 
target_actions = self.target_actor.predict_on_batch(\n state1_batch)\n if not isinstance(target_actions, (list,)):\n target_actions = [target_actions]\n assert len(target_actions) == self.nb_players\n assert target_actions[0].shape == (self.batch_size, self.\n nb_actions)\n if len(self.critic.inputs) > self.nb_players + 1:\n state1_batch_with_action = state1_batch[:]\n else:\n state1_batch_with_action = [state1_batch]\n for action_idx, input_idx in enumerate(self.\n critic_action_input_idxes):\n state1_batch_with_action.insert(input_idx,\n target_actions[action_idx])\n target_q_values = self.target_critic.predict_on_batch(\n state1_batch_with_action)\n if not isinstance(target_q_values, (list,)):\n target_q_values = [target_q_values]\n target_q_values = [tqv.flatten() for tqv in target_q_values]\n assert target_q_values[0].shape == reward_batch.shape\n assert len(target_q_values) == self.nb_players\n discounted_reward_batch = [(self.gamma * terminal1_batch *\n tqv) for tqv in target_q_values]\n assert discounted_reward_batch[0].shape == reward_batch.shape\n targets = [(reward_batch + drb) for drb in\n discounted_reward_batch]\n assert targets[0].shape == reward_batch.shape\n assert len(targets) == self.nb_players\n if len(self.critic.inputs) > self.nb_players + 1:\n state0_batch_with_action = state0_batch[:]\n else:\n state0_batch_with_action = [state0_batch]\n for input_idx in self.critic_action_input_idxes:\n state0_batch_with_action.insert(input_idx, action_batch)\n metrics = self.critic.train_on_batch(state0_batch_with_action,\n targets)\n if self.processor is not None:\n metrics += self.processor.metrics\n if self.step > self.nb_steps_warmup_actor:\n if len(self.actor.inputs) >= 2:\n inputs = state0_batch[:]\n else:\n inputs = [state0_batch]\n if self.uses_learning_phase:\n inputs += [self.training]\n action_values = self.actor_train_fn(inputs)\n assert len(action_values) == self.nb_players\n assert action_values[0].shape == (self.batch_size, self.\n nb_actions)\n if (self.target_model_update >= 1 and self.step % self.\n target_model_update == 0):\n self.update_target_models_hard()\n return metrics\n",
"step-5": "from __future__ import division\nfrom collections import deque\nimport os\nimport warnings\n\nimport numpy as np\nimport keras.backend as K\nimport keras.layers as layers\nimport keras.optimizers as optimizers\n\nfrom rl.core import Agent\nfrom rl.util import *\n\n\ndef mean_q(y_true, y_pred):\n return K.mean(K.max(y_pred, axis=-1))\n\n\n# Deep DPG as described by Lillicrap et al. (2015)\n# http://arxiv.org/pdf/1509.02971v2.pdf\n# http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.646.4324&rep=rep1&type=pdf\nclass UBDDPGAgent(Agent):\n \"\"\"Write me\n \"\"\"\n def __init__(self, nb_actions, actor, critic, nb_players, critic_action_inputs, memory,\n gamma=.99, batch_size=32, nb_steps_warmup_critic=1000, nb_steps_warmup_actor=1000,\n train_interval=1, memory_interval=1, delta_range=None, delta_clip=np.inf,\n random_process=None, custom_model_objects={}, target_model_update=.001, **kwargs):\n assert len(critic_action_inputs) == nb_players\n if hasattr(actor.output, '__len__') and len(actor.output) != nb_players:\n raise ValueError((\n 'Actor \"{}\" does not have the right number of ',\n 'outputs. DDPG expects an actor that has {} outputs.'\n ).format(actor, nb_players))\n # if hasattr(critic.output, '__len__') and len(critic.output) > 1:\n # raise ValueError('Critic \"{}\" has more than one output. DDPG expects a critic that has a single output.'.format(critic))\n for critic_action_input in critic_action_inputs:\n if critic_action_input not in critic.input:\n raise ValueError('Critic \"{}\" does not have designated action input \"{}\".'.format(critic, critic_action_input))\n if not hasattr(critic.input, '__len__') or len(critic.input) < 2:\n raise ValueError('Critic \"{}\" does not have enough inputs. The critic must have at least two inputs, one for the action and one for the observation.'.format(critic))\n\n super(UBDDPGAgent, self).__init__(**kwargs)\n\n # Soft vs hard target model updates.\n if target_model_update < 0:\n raise ValueError('`target_model_update` must be >= 0.')\n elif target_model_update >= 1:\n # Hard update every `target_model_update` steps.\n target_model_update = int(target_model_update)\n else:\n # Soft update with `(1 - target_model_update) * old + target_model_update * new`.\n target_model_update = float(target_model_update)\n\n if delta_range is not None:\n warnings.warn('`delta_range` is deprecated. Please use `delta_clip` instead, which takes a single scalar. 
For now we\\'re falling back to `delta_range[1] = {}`'.format(delta_range[1]))\n delta_clip = delta_range[1]\n\n # Parameters.\n self.nb_actions = nb_actions\n self.nb_steps_warmup_actor = nb_steps_warmup_actor\n self.nb_steps_warmup_critic = nb_steps_warmup_critic\n self.random_process = random_process\n self.delta_clip = delta_clip\n self.gamma = gamma\n self.target_model_update = target_model_update\n self.batch_size = batch_size\n self.train_interval = train_interval\n self.memory_interval = memory_interval\n self.custom_model_objects = custom_model_objects\n\n # Related objects.\n self.actor = actor\n self.critic = critic\n self.nb_players = nb_players\n self.critic_action_inputs = critic_action_inputs\n self.critic_action_input_idxes = [\n self.critic.input.index(critic_action_input)\n for critic_action_input in critic_action_inputs\n ]\n self.memory = memory\n\n # State.\n self.compiled = False\n self.reset_states()\n\n @property\n def uses_learning_phase(self):\n return self.actor.uses_learning_phase or self.critic.uses_learning_phase\n\n def compile(self, optimizer, metrics=[]):\n metrics += [mean_q]\n\n if type(optimizer) in (list, tuple):\n if len(optimizer) != 2:\n raise ValueError('More than two optimizers provided. Please only provide a maximum of two optimizers, the first one for the actor and the second one for the critic.')\n actor_optimizer, critic_optimizer = optimizer\n else:\n actor_optimizer = optimizer\n critic_optimizer = clone_optimizer(optimizer)\n if type(actor_optimizer) is str:\n actor_optimizer = optimizers.get(actor_optimizer)\n if type(critic_optimizer) is str:\n critic_optimizer = optimizers.get(critic_optimizer)\n assert actor_optimizer != critic_optimizer\n\n if len(metrics) == 2 and hasattr(metrics[0], '__len__') and hasattr(metrics[1], '__len__'):\n actor_metrics, critic_metrics = metrics\n else:\n actor_metrics = critic_metrics = metrics\n\n def clipped_error(y_true, y_pred):\n y_true = K.squeeze(y_true, axis=-1)\n y_pred = K.squeeze(y_pred, axis=-1)\n loss = K.mean(\n # K.random_uniform(shape=(self.batch_size, self.nb_players), minval=0., maxval=1.) *\n huber_loss(y_true, y_pred, self.delta_clip),\n axis=-1)\n # y_true = K.print_tensor(y_true, message='y_true: ')\n # y_pred = K.print_tensor(y_pred, message='y_pred: ')\n # loss = K.print_tensor(loss, message='loss: ')\n return loss\n\n # Compile target networks. We only use them in feed-forward mode, hence we can pass any\n # optimizer and loss since we never use it anyway.\n self.target_actor = clone_model(self.actor, self.custom_model_objects)\n self.target_actor.compile(optimizer='sgd', loss='mse')\n self.target_critic = clone_model(self.critic, self.custom_model_objects)\n self.target_critic.compile(optimizer='sgd', loss='mse')\n\n # We also compile the actor. We never optimize the actor using Keras but instead compute\n # the policy gradient ourselves. 
However, we need the actor in feed-forward mode, hence\n # we also compile it with any optimzer and\n self.actor.compile(optimizer='sgd', loss='mse')\n\n # Compile the critic.\n if self.target_model_update < 1.:\n # We use the `AdditionalUpdatesOptimizer` to efficiently soft-update the target model.\n critic_updates = get_soft_target_model_updates(self.target_critic, self.critic, self.target_model_update)\n critic_optimizer = AdditionalUpdatesOptimizer(critic_optimizer, critic_updates)\n self.critic.compile(\n optimizer=critic_optimizer,\n loss=[clipped_error]*self.nb_players,\n metrics=critic_metrics)\n\n # Combine actor and critic so that we can get the policy gradient.\n # Assuming critic's state inputs are the same as actor's.\n critic_inputs = []\n actor_inputs = []\n for i in self.critic.input:\n if i in self.critic_action_inputs:\n critic_inputs.append([])\n else:\n critic_inputs.append(i)\n actor_inputs.append(i)\n actor_outputs = self.actor(actor_inputs)\n if not isinstance(actor_outputs, (list,)):\n actor_outputs = [actor_outputs]\n assert len(actor_outputs) == self.nb_players\n for input_idx, actor_output in zip(self.critic_action_input_idxes, actor_outputs):\n critic_inputs[input_idx] = actor_output\n\n # critic_outputs = layers.Maximum()(self.critic(critic_inputs))\n critic_outputs = self.critic(critic_inputs)\n if not isinstance(critic_outputs, (list,)):\n critic_outputs = [critic_outputs]\n assert len(critic_outputs) == self.nb_players\n\n actor_losses = [None]* self.nb_players\n for input_idx, critic_output in zip(self.critic_action_input_idxes, critic_outputs):\n actor_losses[input_idx] = -K.mean(critic_output)\n updates = actor_optimizer.get_updates(\n params=self.actor.trainable_weights,\n loss=actor_losses)\n if self.target_model_update < 1.:\n # Include soft target model updates.\n updates += get_soft_target_model_updates(self.target_actor, self.actor, self.target_model_update)\n updates += self.actor.updates # include other updates of the actor, e.g. 
for BN\n\n # Finally, combine it all into a callable function.\n if K.backend() == 'tensorflow':\n self.actor_train_fn = K.function(actor_inputs + [K.learning_phase()],\n actor_outputs, updates=updates)\n else:\n if self.uses_learning_phase:\n actor_inputs += [K.learning_phase()]\n self.actor_train_fn = K.function(actor_inputs, actor_outputs, updates=updates)\n self.actor_optimizer = actor_optimizer\n\n self.compiled = True\n\n def load_weights(self, filepath):\n filename, extension = os.path.splitext(filepath)\n actor_filepath = filename + '_actor' + extension\n critic_filepath = filename + '_critic' + extension\n self.actor.load_weights(actor_filepath)\n self.critic.load_weights(critic_filepath)\n self.update_target_models_hard()\n\n def save_weights(self, filepath, overwrite=False):\n filename, extension = os.path.splitext(filepath)\n actor_filepath = filename + '_actor' + extension\n critic_filepath = filename + '_critic' + extension\n self.actor.save_weights(actor_filepath, overwrite=overwrite)\n self.critic.save_weights(critic_filepath, overwrite=overwrite)\n\n def update_target_models_hard(self):\n self.target_critic.set_weights(self.critic.get_weights())\n self.target_actor.set_weights(self.actor.get_weights())\n\n # TODO: implement pickle\n\n def reset_states(self):\n if self.random_process is not None:\n self.random_process.reset_states()\n self.recent_action = None\n self.recent_observation = None\n if self.compiled:\n self.actor.reset_states()\n self.critic.reset_states()\n self.target_actor.reset_states()\n self.target_critic.reset_states()\n\n def process_state_batch(self, batch):\n batch = np.array(batch)\n if self.processor is None:\n return batch\n return self.processor.process_state_batch(batch)\n\n def select_action(self, state):\n batch = self.process_state_batch([state])\n # actions = [action.flatten() for action in self.actor.predict_on_batch(batch)]\n actions = self.actor.predict_on_batch(batch)\n if self.nb_players == 1:\n actions =[actions]\n # actions = [a.flatten() for a in actions]\n assert len(actions) == self.nb_players\n # assert actions[0].shape == (self.nb_actions,)\n assert actions[0].shape == (1, self.nb_actions)\n # print('actions: {}'.format(actions))\n\n if len(self.critic.inputs) > (self.nb_players+1): # state is a list\n state_batch_with_action = batch[:]\n else:\n state_batch_with_action = [batch]\n for action_idx, input_idx in enumerate(self.critic_action_input_idxes):\n state_batch_with_action.insert(input_idx, actions[action_idx])\n q_values = [\n qv.flatten() \n for qv in self.critic.predict_on_batch(state_batch_with_action)\n ]\n assert q_values[0].shape == (1, )\n assert len(q_values) == self.nb_players\n # print('q_values: {}'.format(q_values))\n\n action_best = actions[np.argmax(q_values)].flatten()\n # assert action_best.shape == (self.nb_actions, )\n assert action_best.shape == (self.nb_actions, )\n # print('action_best: {}'.format(action_best))\n # print(type(action_best[0]))\n\n # Apply noise, if a random process is set.\n if self.training and self.random_process is not None:\n noise = self.random_process.sample()\n assert noise.shape == action_best.shape\n action_best += noise\n\n return action_best\n\n def forward(self, observation):\n # Select an action.\n state = self.memory.get_recent_state(observation)\n action = self.select_action(state) # TODO: move this into policy\n\n # Book-keeping.\n self.recent_observation = observation\n self.recent_action = action\n\n return action\n\n @property\n def layers(self):\n return 
self.actor.layers[:] + self.critic.layers[:]\n\n @property\n def metrics_names(self):\n names = self.critic.metrics_names[:]\n if self.processor is not None:\n names += self.processor.metrics_names[:]\n return names\n\n def backward(self, reward, terminal=False):\n # Store most recent experience in memory.\n if self.step % self.memory_interval == 0:\n self.memory.append(self.recent_observation, self.recent_action, reward, terminal,\n training=self.training)\n\n metrics = [np.nan for _ in self.metrics_names]\n if not self.training:\n # We're done here. No need to update the experience memory since we only use the working\n # memory to obtain the state over the most recent observations.\n return metrics\n\n # Train the network on a single stochastic batch.\n can_train_either = self.step > self.nb_steps_warmup_critic or self.step > self.nb_steps_warmup_actor\n if can_train_either and self.step % self.train_interval == 0:\n experiences = self.memory.sample(self.batch_size)\n assert len(experiences) == self.batch_size\n\n # Start by extracting the necessary parameters (we use a vectorized implementation).\n state0_batch = []\n reward_batch = []\n action_batch = []\n terminal1_batch = []\n state1_batch = []\n for e in experiences:\n state0_batch.append(e.state0)\n state1_batch.append(e.state1)\n reward_batch.append(e.reward)\n action_batch.append(e.action)\n terminal1_batch.append(0. if e.terminal1 else 1.)\n\n # Prepare and validate parameters.\n state0_batch = self.process_state_batch(state0_batch)\n state1_batch = self.process_state_batch(state1_batch)\n terminal1_batch = np.array(terminal1_batch)\n reward_batch = np.array(reward_batch)\n action_batch = np.array(action_batch)\n assert reward_batch.shape == (self.batch_size,)\n assert terminal1_batch.shape == reward_batch.shape\n assert action_batch.shape == (self.batch_size, self.nb_actions)\n\n # Update critic, if warm up is over.\n if self.step > self.nb_steps_warmup_critic:\n target_actions = self.target_actor.predict_on_batch(state1_batch)\n if not isinstance(target_actions, (list,)):\n target_actions = [target_actions]\n assert len(target_actions) == self.nb_players\n assert target_actions[0].shape == (self.batch_size, self.nb_actions)\n if len(self.critic.inputs) > (self.nb_players+1): # state is a list\n # if len(self.critic.inputs) >= 3:\n state1_batch_with_action = state1_batch[:]\n else:\n state1_batch_with_action = [state1_batch]\n # state1_batch_with_action.insert(self.critic_action_input_idx, target_actions)\n for action_idx, input_idx in enumerate(self.critic_action_input_idxes):\n state1_batch_with_action.insert(input_idx, target_actions[action_idx])\n target_q_values = self.target_critic.predict_on_batch(state1_batch_with_action)\n if not isinstance(target_q_values, (list,)):\n target_q_values = [target_q_values]\n target_q_values = [ tqv.flatten() for tqv in target_q_values]\n assert target_q_values[0].shape == reward_batch.shape\n assert len(target_q_values) == self.nb_players\n\n # Compute r_t + gamma * Q(s_t+1, mu(s_t+1)) and update the target ys accordingly,\n # but only for the affected output units (as given by action_batch).\n discounted_reward_batch = [\n self.gamma * terminal1_batch * tqv\n for tqv in target_q_values\n ]\n assert discounted_reward_batch[0].shape == reward_batch.shape\n targets = [reward_batch + drb for drb in discounted_reward_batch] # .reshape(self.batch_size, 1)\n assert targets[0].shape == reward_batch.shape\n assert len(targets) == self.nb_players\n\n # Perform a single batch update on the critic 
network.\n # if len(self.critic.inputs) >= 3:\n if len(self.critic.inputs) > (self.nb_players+1): # state is a list\n state0_batch_with_action = state0_batch[:]\n else:\n state0_batch_with_action = [state0_batch]\n for input_idx in self.critic_action_input_idxes:\n state0_batch_with_action.insert(input_idx, action_batch)\n # state0_batch_with_action.insert(self.critic_action_input_idx, action_batch)\n metrics = self.critic.train_on_batch(\n state0_batch_with_action,\n targets)\n if self.processor is not None:\n metrics += self.processor.metrics\n\n # q_values = self.critic.predict_on_batch(state0_batch_with_action)\n # if not isinstance(q_values, (list,)):\n # q_values = [q_values]\n # q_values = [ qv.flatten() for qv in q_values]\n # print('gamma: {}'.format(self.gamma))\n # print('terminal1_batch: {}'.format(terminal1_batch))\n # print('target_q_values: {}'.format(target_q_values))\n # print('discounted_reward_batch: {}'.format(discounted_reward_batch))\n # print('reward_batch: {}'.format(reward_batch))\n # print('targets: {}'.format(targets))\n # print('current q values: {}'.format(q_values))\n\n\n # Update actor, if warm up is over.\n if self.step > self.nb_steps_warmup_actor:\n # TODO: implement metrics for actor\n if len(self.actor.inputs) >= 2:\n inputs = state0_batch[:]\n else:\n inputs = [state0_batch]\n if self.uses_learning_phase:\n inputs += [self.training]\n action_values = self.actor_train_fn(inputs)\n assert len(action_values) == self.nb_players\n assert action_values[0].shape == (self.batch_size, self.nb_actions)\n\n if self.target_model_update >= 1 and self.step % self.target_model_update == 0:\n self.update_target_models_hard()\n\n return metrics\n",
"step-ids": [
9,
12,
15,
17,
18
]
}
|
[
9,
12,
15,
17,
18
] |
#!/usr/bin/env python3
"""Demonstrates how to do deterministic task generation using l2l"""
import random
def fixed_random(func):
"""Create the data"""
def _func(self, i):
state = random.getstate()
if self.deterministic or self.seed is not None:
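            # assumes self.seed is an int here; a None seed with deterministic=True would make seed + i fail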
random.seed(self.seed + i)
results = func(self, i)
random.setstate(state)
else:
results = func(self, i)
return results
return _func
class RandomTest:
def __init__(self, seed=42, deterministic=False):
self.seed = seed
self.deterministic = deterministic
@fixed_random
def test_function(self, i):
return [random.randint(0, 10) for x in range(10)]
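
# With a seed set (not None), every call re-seeds with seed + i, so repeated
# calls with the same i print identical lists.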
rt = RandomTest(0)
print(rt.test_function(0))
print(rt.test_function(0))
rt.seed = 1
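# Changing the seed changes the sequence, but calls stay repeatable.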
print(rt.test_function(0))
print(rt.test_function(0))
|
normal
|
{
"blob_id": "7ee5779625d53ff1e18f73b20ba5849666f89b55",
"index": 2111,
"step-1": "<mask token>\n\n\nclass RandomTest:\n\n def __init__(self, seed=42, deterministic=False):\n self.seed = seed\n self.deterministic = deterministic\n\n @fixed_random\n def test_function(self, i):\n return [random.randint(0, 10) for x in range(10)]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef fixed_random(func):\n \"\"\"Create the data\"\"\"\n\n def _func(self, i):\n state = random.getstate()\n if self.deterministic or self.seed is not None:\n random.seed(self.seed + i)\n results = func(self, i)\n random.setstate(state)\n else:\n results = func(self, i)\n return results\n return _func\n\n\nclass RandomTest:\n\n def __init__(self, seed=42, deterministic=False):\n self.seed = seed\n self.deterministic = deterministic\n\n @fixed_random\n def test_function(self, i):\n return [random.randint(0, 10) for x in range(10)]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef fixed_random(func):\n \"\"\"Create the data\"\"\"\n\n def _func(self, i):\n state = random.getstate()\n if self.deterministic or self.seed is not None:\n random.seed(self.seed + i)\n results = func(self, i)\n random.setstate(state)\n else:\n results = func(self, i)\n return results\n return _func\n\n\nclass RandomTest:\n\n def __init__(self, seed=42, deterministic=False):\n self.seed = seed\n self.deterministic = deterministic\n\n @fixed_random\n def test_function(self, i):\n return [random.randint(0, 10) for x in range(10)]\n\n\nrt = RandomTest(0)\nprint(rt.test_function(0))\nprint(rt.test_function(0))\nrt.seed = 1\nprint(rt.test_function(0))\nprint(rt.test_function(0))\n",
"step-4": "<mask token>\nimport random\n\n\ndef fixed_random(func):\n \"\"\"Create the data\"\"\"\n\n def _func(self, i):\n state = random.getstate()\n if self.deterministic or self.seed is not None:\n random.seed(self.seed + i)\n results = func(self, i)\n random.setstate(state)\n else:\n results = func(self, i)\n return results\n return _func\n\n\nclass RandomTest:\n\n def __init__(self, seed=42, deterministic=False):\n self.seed = seed\n self.deterministic = deterministic\n\n @fixed_random\n def test_function(self, i):\n return [random.randint(0, 10) for x in range(10)]\n\n\nrt = RandomTest(0)\nprint(rt.test_function(0))\nprint(rt.test_function(0))\nrt.seed = 1\nprint(rt.test_function(0))\nprint(rt.test_function(0))\n",
"step-5": "#/usr/bin/env python3\n\n\"\"\"Demonstrates how to do deterministic task generation using l2l\"\"\"\nimport random\n\ndef fixed_random(func):\n \"\"\"Create the data\"\"\"\n def _func(self, i):\n state = random.getstate()\n if self.deterministic or self.seed is not None:\n random.seed(self.seed + i)\n results = func(self, i)\n random.setstate(state)\n else:\n results = func(self, i)\n return results\n\n return _func\n\n\nclass RandomTest:\n\n def __init__(self, seed=42, deterministic=False):\n self.seed = seed\n self.deterministic = deterministic\n\n @fixed_random\n def test_function(self, i):\n return [random.randint(0, 10) for x in range(10)]\n\n\nrt = RandomTest(0)\nprint(rt.test_function(0))\nprint(rt.test_function(0))\nrt.seed = 1\nprint(rt.test_function(0))\nprint(rt.test_function(0))\n",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
import librosa
import librosa.display
import matplotlib.pyplot as plt
import os
import numpy as np
import time
import multiprocessing as mp
from tempfile import TemporaryFile
class DataSet:
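    """Loads wav files in parallel and builds (mel window, STFT frame) training pairs."""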
    def __init__(self, training_folder):
self.training_folder = training_folder
print("load Data")
    def loadMelAndStft(self, filename):
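        """Return a list of [stacked mel window, STFT frame] pairs for one wav file."""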
wav, sr = librosa.load(filename)
stft_in = librosa.stft(wav)
        # melspectrogram expects a (power) spectrogram, not the complex STFT
        mel_in = librosa.feature.melspectrogram(S=np.abs(stft_in) ** 2)
stft_in = np.array(stft_in)
mel_in = np.array(mel_in)
mel_in = np.swapaxes(mel_in, 0, 1)
stft_in = np.swapaxes(stft_in, 0, 1)
mel_and_stft = []
input_overlap_per_side = 1
for element in range(mel_in.shape[0]):
            # keep only frames with a full neighbourhood on both sides
            if input_overlap_per_side <= element < mel_in.shape[0] - input_overlap_per_side:
mel_in_with_overlap = []
for number in range(input_overlap_per_side*2+1):
actual_mel_index = element - input_overlap_per_side + number
mel_in_with_overlap.append(mel_in[actual_mel_index])
mel_in_with_overlap = np.asarray(mel_in_with_overlap, dtype=np.float32).flatten()
                stft_frame = np.abs(stft_in[element]).astype(np.float32)
                # use the magnitude of the complex STFT frame; a bare float32 cast would silently drop the imaginary part
                mel_and_stft.append([mel_in_with_overlap, stft_frame])
return mel_and_stft
    def readFiles(self, queue, file_list, start, end):
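        """Worker body: load files file_list[start:end] and put the resulting pairs on the queue."""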
print("start-read-file")
print("start ",start)
print("end ",end)
print("file_list ",str(len(file_list)))
load = []
for filename in file_list[start:end]:
load += self.loadMelAndStft(self.training_folder+filename)
print("Path: " + filename)
queue.put(load)
print("finished")
def main(self):
queue = mp.Queue()
file_list = os.listdir(self.training_folder)
time_before = time.time()
processes = []
file_batch_size = 50
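        # one worker process per batch of 50 files; note there is no cap on concurrent processes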
        steps = -(-len(file_list) // file_batch_size)  # ceiling division; avoids a trailing empty batch
print(steps)
for file_batch in range(steps):
print("run",file_batch)
start_read = file_batch*file_batch_size
end_read = file_batch*file_batch_size+file_batch_size
if len(file_list) < end_read:
end_read = len(file_list)
process = mp.Process(target=self.readFiles, args=(queue,file_list,start_read,end_read))
processes.append(process)
for process in processes:
print("start process")
process.start()
returns = []
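        # drain the queue before joining: a worker can block until its queued payload is consumed, so joining first may deadlock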
for process in processes:
ret = queue.get() # will block
returns += ret
        for process in processes:
            process.join()
print(len(returns))
print("time difference: ", str(time.time()-time_before))
return returns
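
if __name__ == '__main__':
    # Minimal usage sketch; 'training_data/' is a hypothetical folder of wav files.
    # The guard is required because multiprocessing re-imports this module on
    # spawn-based platforms (Windows, recent macOS).
    pairs = DataSet('training_data/').main()
    print('total pairs:', len(pairs))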
|
normal
|
{
"blob_id": "ba09dbe3fbca51ece8a7d482324a2dec32e7dc8a",
"index": 5016,
"step-1": "<mask token>\n\n\nclass DataSet:\n\n def __init__(self, training_folder):\n self.training_folder = training_folder\n print('load Data')\n <mask token>\n\n def readFiles(self, queue, file_list, start, end):\n print('start-read-file')\n print('start ', start)\n print('end ', end)\n print('file_list ', str(len(file_list)))\n load = []\n for filename in file_list[start:end]:\n load += self.loadMelAndStft(self.training_folder + filename)\n print('Path: ' + filename)\n queue.put(load)\n print('finished')\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass DataSet:\n\n def __init__(self, training_folder):\n self.training_folder = training_folder\n print('load Data')\n <mask token>\n\n def readFiles(self, queue, file_list, start, end):\n print('start-read-file')\n print('start ', start)\n print('end ', end)\n print('file_list ', str(len(file_list)))\n load = []\n for filename in file_list[start:end]:\n load += self.loadMelAndStft(self.training_folder + filename)\n print('Path: ' + filename)\n queue.put(load)\n print('finished')\n\n def main(self):\n queue = mp.Queue()\n file_list = os.listdir(self.training_folder)\n time_before = time.time()\n processes = []\n file_batch_size = 50\n steps = int(len(file_list) / file_batch_size) + 1\n print(steps)\n for file_batch in range(steps):\n print('run', file_batch)\n start_read = file_batch * file_batch_size\n end_read = file_batch * file_batch_size + file_batch_size\n if len(file_list) < end_read:\n end_read = len(file_list)\n process = mp.Process(target=self.readFiles, args=(queue,\n file_list, start_read, end_read))\n processes.append(process)\n for process in processes:\n print('start process')\n process.start()\n returns = []\n for process in processes:\n ret = queue.get()\n returns += ret\n for process in processes:\n process.join()\n process.join()\n print(len(returns))\n print('time difference: ', str(time.time() - time_before))\n return returns\n",
"step-3": "<mask token>\n\n\nclass DataSet:\n\n def __init__(self, training_folder):\n self.training_folder = training_folder\n print('load Data')\n\n def loadMelAndStft(self, filename):\n wav, sr = librosa.load(filename)\n stft_in = librosa.stft(wav)\n mel_in = librosa.feature.melspectrogram(S=stft_in)\n stft_in = np.array(stft_in)\n mel_in = np.array(mel_in)\n mel_in = np.swapaxes(mel_in, 0, 1)\n stft_in = np.swapaxes(stft_in, 0, 1)\n mel_and_stft = []\n input_overlap_per_side = 1\n for element in range(mel_in.shape[0]):\n if element > input_overlap_per_side and element < mel_in.shape[0\n ] - input_overlap_per_side:\n mel_in_with_overlap = []\n for number in range(input_overlap_per_side * 2 + 1):\n actual_mel_index = (element - input_overlap_per_side +\n number)\n mel_in_with_overlap.append(mel_in[actual_mel_index])\n mel_in_with_overlap = np.asarray(mel_in_with_overlap, dtype\n =np.float32).flatten()\n stft_in = np.asarray(stft_in, dtype=np.float32)\n mel_and_stft.append([mel_in_with_overlap, stft_in[element]])\n return mel_and_stft\n\n def readFiles(self, queue, file_list, start, end):\n print('start-read-file')\n print('start ', start)\n print('end ', end)\n print('file_list ', str(len(file_list)))\n load = []\n for filename in file_list[start:end]:\n load += self.loadMelAndStft(self.training_folder + filename)\n print('Path: ' + filename)\n queue.put(load)\n print('finished')\n\n def main(self):\n queue = mp.Queue()\n file_list = os.listdir(self.training_folder)\n time_before = time.time()\n processes = []\n file_batch_size = 50\n steps = int(len(file_list) / file_batch_size) + 1\n print(steps)\n for file_batch in range(steps):\n print('run', file_batch)\n start_read = file_batch * file_batch_size\n end_read = file_batch * file_batch_size + file_batch_size\n if len(file_list) < end_read:\n end_read = len(file_list)\n process = mp.Process(target=self.readFiles, args=(queue,\n file_list, start_read, end_read))\n processes.append(process)\n for process in processes:\n print('start process')\n process.start()\n returns = []\n for process in processes:\n ret = queue.get()\n returns += ret\n for process in processes:\n process.join()\n process.join()\n print(len(returns))\n print('time difference: ', str(time.time() - time_before))\n return returns\n",
"step-4": "import librosa\nimport librosa.display\nimport matplotlib.pyplot as plt\nimport os\nimport numpy as np\nimport time\nimport multiprocessing as mp\nfrom tempfile import TemporaryFile\n\n\nclass DataSet:\n\n def __init__(self, training_folder):\n self.training_folder = training_folder\n print('load Data')\n\n def loadMelAndStft(self, filename):\n wav, sr = librosa.load(filename)\n stft_in = librosa.stft(wav)\n mel_in = librosa.feature.melspectrogram(S=stft_in)\n stft_in = np.array(stft_in)\n mel_in = np.array(mel_in)\n mel_in = np.swapaxes(mel_in, 0, 1)\n stft_in = np.swapaxes(stft_in, 0, 1)\n mel_and_stft = []\n input_overlap_per_side = 1\n for element in range(mel_in.shape[0]):\n if element > input_overlap_per_side and element < mel_in.shape[0\n ] - input_overlap_per_side:\n mel_in_with_overlap = []\n for number in range(input_overlap_per_side * 2 + 1):\n actual_mel_index = (element - input_overlap_per_side +\n number)\n mel_in_with_overlap.append(mel_in[actual_mel_index])\n mel_in_with_overlap = np.asarray(mel_in_with_overlap, dtype\n =np.float32).flatten()\n stft_in = np.asarray(stft_in, dtype=np.float32)\n mel_and_stft.append([mel_in_with_overlap, stft_in[element]])\n return mel_and_stft\n\n def readFiles(self, queue, file_list, start, end):\n print('start-read-file')\n print('start ', start)\n print('end ', end)\n print('file_list ', str(len(file_list)))\n load = []\n for filename in file_list[start:end]:\n load += self.loadMelAndStft(self.training_folder + filename)\n print('Path: ' + filename)\n queue.put(load)\n print('finished')\n\n def main(self):\n queue = mp.Queue()\n file_list = os.listdir(self.training_folder)\n time_before = time.time()\n processes = []\n file_batch_size = 50\n steps = int(len(file_list) / file_batch_size) + 1\n print(steps)\n for file_batch in range(steps):\n print('run', file_batch)\n start_read = file_batch * file_batch_size\n end_read = file_batch * file_batch_size + file_batch_size\n if len(file_list) < end_read:\n end_read = len(file_list)\n process = mp.Process(target=self.readFiles, args=(queue,\n file_list, start_read, end_read))\n processes.append(process)\n for process in processes:\n print('start process')\n process.start()\n returns = []\n for process in processes:\n ret = queue.get()\n returns += ret\n for process in processes:\n process.join()\n process.join()\n print(len(returns))\n print('time difference: ', str(time.time() - time_before))\n return returns\n",
"step-5": "import librosa\nimport librosa.display\nimport matplotlib.pyplot as plt\nimport os\nimport numpy as np\nimport time\nimport multiprocessing as mp\nfrom tempfile import TemporaryFile\n\nclass DataSet():\n def __init__(self,training_folder):\n self.training_folder = training_folder\n print(\"load Data\")\n\n def loadMelAndStft(self,filename):\n wav, sr = librosa.load(filename)\n stft_in = librosa.stft(wav)\n mel_in = librosa.feature.melspectrogram(S=stft_in)\n stft_in = np.array(stft_in)\n mel_in = np.array(mel_in)\n\n mel_in = np.swapaxes(mel_in, 0, 1)\n stft_in = np.swapaxes(stft_in, 0, 1)\n\n mel_and_stft = []\n input_overlap_per_side = 1\n for element in range(mel_in.shape[0]):\n if(element > input_overlap_per_side and element < mel_in.shape[0]-input_overlap_per_side):\n mel_in_with_overlap = []\n for number in range(input_overlap_per_side*2+1):\n actual_mel_index = element - input_overlap_per_side + number\n mel_in_with_overlap.append(mel_in[actual_mel_index])\n mel_in_with_overlap = np.asarray(mel_in_with_overlap, dtype=np.float32).flatten()\n stft_in =np.asarray(stft_in, dtype=np.float32)\n mel_and_stft.append([mel_in_with_overlap,stft_in[element]])\n\n return mel_and_stft\n\n def readFiles(self,queue,file_list,start,end):\n print(\"start-read-file\")\n print(\"start \",start)\n print(\"end \",end)\n print(\"file_list \",str(len(file_list)))\n load = []\n for filename in file_list[start:end]:\n load += self.loadMelAndStft(self.training_folder+filename)\n print(\"Path: \" + filename)\n queue.put(load)\n print(\"finished\")\n\n def main(self):\n queue = mp.Queue()\n file_list = os.listdir(self.training_folder)\n\n time_before = time.time()\n processes = []\n file_batch_size = 50\n steps= int(len(file_list)/file_batch_size)+1\n print(steps)\n for file_batch in range(steps):\n print(\"run\",file_batch)\n start_read = file_batch*file_batch_size\n\n end_read = file_batch*file_batch_size+file_batch_size\n if len(file_list) < end_read:\n end_read = len(file_list)\n\n process = mp.Process(target=self.readFiles, args=(queue,file_list,start_read,end_read))\n processes.append(process)\n\n for process in processes:\n print(\"start process\")\n process.start()\n returns = []\n for process in processes:\n ret = queue.get() # will block\n returns += ret\n for process in processes:\n process.join()\n process.join()\n print(len(returns))\n print(\"time difference: \", str(time.time()-time_before))\n return returns\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from django.contrib import admin, messages
from django.conf.urls import url
from django.shortcuts import render
from django.contrib.sites.models import Site
from django.http import HttpResponseRedirect, HttpResponse
from website_data.models import *
from website_data.forms import *
import logging
# Get an instance of a logger
logger = logging.getLogger(__name__)
class WebsiteDataAdmin(admin.ModelAdmin):
    # Override get_urls() to add custom admin views (with auth check and without cache)
def get_urls(self):
urls = super(WebsiteDataAdmin, self).get_urls()
my_urls = [
# url(r'^edit-site/(?:(?P<site_id>\d+)/)$', self.admin_site.admin_view(self.edit_site)),
url(r'^create-defaults/$', self.admin_site.admin_view(self.create_defaults)),
]
        # custom URLs must come first so they are matched before the defaults
return my_urls + urls
"""
def edit_site(self, request, site_id):
""Function to select a site to edit""
WebsiteData_obj = WebsiteData()
Site_obj = Site.objects.get(pk=site_id)
if request.method == 'POST':
form = EditTextSiteForm(request.POST)
if form.is_valid():
                # TODO: save the values of the related keys
WebsiteData_obj.set_all_keys_about_site(site_id=site_id, post=request.POST)
                # success page pre-filled with the updated data
messages.add_message(request, messages.SUCCESS, 'Dati salvati con successo.')
return HttpResponseRedirect('/admin/website_data/websitedata/edit-site/' + str(site_id)) # Redirect after POST
else:
form = EditTextSiteForm() # An unbound form
        # pre-fill the POST data with any existing values
request.POST = WebsiteData_obj.get_all_keys_about_site(site_domain=Site_obj.domain)
# logger.info("chiavi salvate in db per il sito " + str(site_id) + ": " + str(request.POST))
context = {
'form' : form,
'post': request.POST,
'title': "Modifica informazioni sito: " + str(Site_obj.domain),
'opts': self.model._meta,
'app_label': self.model._meta.app_label,
'has_permission': request.user.is_superuser,
'site_url': '/',
}
return render(request, 'admin/custom_view/edit_site.html', context)
"""
def create_defaults(self, request):
"""Function to create default keys and themes"""
ThemeKeys_obj = ThemeKeys()
ThemeKeys_obj.create_default_keys()
WebsitePreferenceKeys_obj = WebsitePreferenceKeys()
WebsitePreferenceKeys_obj.create_default_keys()
context = {
'title': "Creazione chiavi e temi di default",
'opts': self.model._meta,
'app_label': self.model._meta.app_label,
'has_permission': request.user.is_superuser,
'site_url': '/',
}
messages.add_message(request, messages.SUCCESS, 'Valori di default creati con successo.')
return render(request, 'admin/custom_view/create_defaults.html', context)
def get_model_perms(self, request):
"""
https://stackoverflow.com/questions/2431727/django-admin-hide-a-model
Return empty perms dict thus hiding the model from admin index.
        This keeps the custom views of the website_data app reachable while
        hiding every model: the URLs still work, but the admin index shows
        no model to add or edit.
"""
return {}
class CustomSiteInstanceInline(admin.StackedInline):
model = CustomSites
class WebsitePreferencesInstanceInline(admin.TabularInline):
model = WebsitePreferences
# Define a new Site admin
class SiteAdmin(admin.ModelAdmin):
list_filter = ('domain', 'name')
inlines = [CustomSiteInstanceInline, WebsitePreferencesInstanceInline]
# TODO: extra admin pages (use only for debugging or maintenance)
"""
admin.site.register(Themes)
admin.site.register(ThemeKeys)
admin.site.register(WebsitePreferences)
admin.site.register(WebsitePreferenceKeys)
admin.site.register(CustomSites)
"""
admin.site.unregister(Site)
admin.site.register(Site, SiteAdmin)
admin.site.register(WebsiteData, WebsiteDataAdmin)
|
normal
|
{
"blob_id": "614d6484678890df2ae0f750a3cad51a2b9bd1c6",
"index": 2315,
"step-1": "<mask token>\n\n\nclass WebsitePreferencesInstanceInline(admin.TabularInline):\n model = WebsitePreferences\n\n\nclass SiteAdmin(admin.ModelAdmin):\n list_filter = 'domain', 'name'\n inlines = [CustomSiteInstanceInline, WebsitePreferencesInstanceInline]\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass WebsiteDataAdmin(admin.ModelAdmin):\n\n def get_urls(self):\n urls = super(WebsiteDataAdmin, self).get_urls()\n my_urls = [url('^create-defaults/$', self.admin_site.admin_view(\n self.create_defaults))]\n return my_urls + urls\n <mask token>\n\n def create_defaults(self, request):\n \"\"\"Function to create default keys and themes\"\"\"\n ThemeKeys_obj = ThemeKeys()\n ThemeKeys_obj.create_default_keys()\n WebsitePreferenceKeys_obj = WebsitePreferenceKeys()\n WebsitePreferenceKeys_obj.create_default_keys()\n context = {'title': 'Creazione chiavi e temi di default', 'opts':\n self.model._meta, 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser, 'site_url': '/'}\n messages.add_message(request, messages.SUCCESS,\n 'Valori di default creati con successo.')\n return render(request, 'admin/custom_view/create_defaults.html',\n context)\n\n def get_model_perms(self, request):\n \"\"\"\n https://stackoverflow.com/questions/2431727/django-admin-hide-a-model\n\n Return empty perms dict thus hiding the model from admin index.\n Per far funzionare le custom view dell'app website_data ma nascondendo\n tutti i modelli, in questo modo gli url funzionano ma nell'admin non si\n vede nessun modello da modificare/aggiungere.\n \"\"\"\n return {}\n\n\nclass CustomSiteInstanceInline(admin.StackedInline):\n model = CustomSites\n\n\nclass WebsitePreferencesInstanceInline(admin.TabularInline):\n model = WebsitePreferences\n\n\nclass SiteAdmin(admin.ModelAdmin):\n list_filter = 'domain', 'name'\n inlines = [CustomSiteInstanceInline, WebsitePreferencesInstanceInline]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass WebsiteDataAdmin(admin.ModelAdmin):\n\n def get_urls(self):\n urls = super(WebsiteDataAdmin, self).get_urls()\n my_urls = [url('^create-defaults/$', self.admin_site.admin_view(\n self.create_defaults))]\n return my_urls + urls\n \"\"\"\n def edit_site(self, request, site_id):\n \"\"Function to select a site to edit\"\"\n\n WebsiteData_obj = WebsiteData()\n Site_obj = Site.objects.get(pk=site_id)\n\n if request.method == 'POST':\n form = EditTextSiteForm(request.POST)\n\n if form.is_valid():\n # TODO: salvo i valori delle relative chiavi\n WebsiteData_obj.set_all_keys_about_site(site_id=site_id, post=request.POST)\n\n # pagina di successo con i dati aggiornati precompilati\n messages.add_message(request, messages.SUCCESS, 'Dati salvati con successo.')\n return HttpResponseRedirect('/admin/website_data/websitedata/edit-site/' + str(site_id)) # Redirect after POST\n else:\n form = EditTextSiteForm() # An unbound form\n # precompilo la post con eventuali valori presenti\n request.POST = WebsiteData_obj.get_all_keys_about_site(site_domain=Site_obj.domain)\n # logger.info(\"chiavi salvate in db per il sito \" + str(site_id) + \": \" + str(request.POST))\n\n context = {\n 'form' : form,\n 'post': request.POST,\n 'title': \"Modifica informazioni sito: \" + str(Site_obj.domain),\n 'opts': self.model._meta,\n 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser,\n 'site_url': '/',\n }\n\n return render(request, 'admin/custom_view/edit_site.html', context)\n \"\"\"\n\n def create_defaults(self, request):\n \"\"\"Function to create default keys and themes\"\"\"\n ThemeKeys_obj = ThemeKeys()\n ThemeKeys_obj.create_default_keys()\n WebsitePreferenceKeys_obj = WebsitePreferenceKeys()\n WebsitePreferenceKeys_obj.create_default_keys()\n context = {'title': 'Creazione chiavi e temi di default', 'opts':\n self.model._meta, 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser, 'site_url': '/'}\n messages.add_message(request, messages.SUCCESS,\n 'Valori di default creati con successo.')\n return render(request, 'admin/custom_view/create_defaults.html',\n context)\n\n def get_model_perms(self, request):\n \"\"\"\n https://stackoverflow.com/questions/2431727/django-admin-hide-a-model\n\n Return empty perms dict thus hiding the model from admin index.\n Per far funzionare le custom view dell'app website_data ma nascondendo\n tutti i modelli, in questo modo gli url funzionano ma nell'admin non si\n vede nessun modello da modificare/aggiungere.\n \"\"\"\n return {}\n\n\nclass CustomSiteInstanceInline(admin.StackedInline):\n model = CustomSites\n\n\nclass WebsitePreferencesInstanceInline(admin.TabularInline):\n model = WebsitePreferences\n\n\nclass SiteAdmin(admin.ModelAdmin):\n list_filter = 'domain', 'name'\n inlines = [CustomSiteInstanceInline, WebsitePreferencesInstanceInline]\n\n\n<mask token>\nadmin.site.unregister(Site)\nadmin.site.register(Site, SiteAdmin)\nadmin.site.register(WebsiteData, WebsiteDataAdmin)\n",
"step-4": "from django.contrib import admin, messages\nfrom django.conf.urls import url\nfrom django.shortcuts import render\nfrom django.contrib.sites.models import Site\nfrom django.http import HttpResponseRedirect, HttpResponse\nfrom website_data.models import *\nfrom website_data.forms import *\nimport logging\nlogger = logging.getLogger(__name__)\n\n\nclass WebsiteDataAdmin(admin.ModelAdmin):\n\n def get_urls(self):\n urls = super(WebsiteDataAdmin, self).get_urls()\n my_urls = [url('^create-defaults/$', self.admin_site.admin_view(\n self.create_defaults))]\n return my_urls + urls\n \"\"\"\n def edit_site(self, request, site_id):\n \"\"Function to select a site to edit\"\"\n\n WebsiteData_obj = WebsiteData()\n Site_obj = Site.objects.get(pk=site_id)\n\n if request.method == 'POST':\n form = EditTextSiteForm(request.POST)\n\n if form.is_valid():\n # TODO: salvo i valori delle relative chiavi\n WebsiteData_obj.set_all_keys_about_site(site_id=site_id, post=request.POST)\n\n # pagina di successo con i dati aggiornati precompilati\n messages.add_message(request, messages.SUCCESS, 'Dati salvati con successo.')\n return HttpResponseRedirect('/admin/website_data/websitedata/edit-site/' + str(site_id)) # Redirect after POST\n else:\n form = EditTextSiteForm() # An unbound form\n # precompilo la post con eventuali valori presenti\n request.POST = WebsiteData_obj.get_all_keys_about_site(site_domain=Site_obj.domain)\n # logger.info(\"chiavi salvate in db per il sito \" + str(site_id) + \": \" + str(request.POST))\n\n context = {\n 'form' : form,\n 'post': request.POST,\n 'title': \"Modifica informazioni sito: \" + str(Site_obj.domain),\n 'opts': self.model._meta,\n 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser,\n 'site_url': '/',\n }\n\n return render(request, 'admin/custom_view/edit_site.html', context)\n \"\"\"\n\n def create_defaults(self, request):\n \"\"\"Function to create default keys and themes\"\"\"\n ThemeKeys_obj = ThemeKeys()\n ThemeKeys_obj.create_default_keys()\n WebsitePreferenceKeys_obj = WebsitePreferenceKeys()\n WebsitePreferenceKeys_obj.create_default_keys()\n context = {'title': 'Creazione chiavi e temi di default', 'opts':\n self.model._meta, 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser, 'site_url': '/'}\n messages.add_message(request, messages.SUCCESS,\n 'Valori di default creati con successo.')\n return render(request, 'admin/custom_view/create_defaults.html',\n context)\n\n def get_model_perms(self, request):\n \"\"\"\n https://stackoverflow.com/questions/2431727/django-admin-hide-a-model\n\n Return empty perms dict thus hiding the model from admin index.\n Per far funzionare le custom view dell'app website_data ma nascondendo\n tutti i modelli, in questo modo gli url funzionano ma nell'admin non si\n vede nessun modello da modificare/aggiungere.\n \"\"\"\n return {}\n\n\nclass CustomSiteInstanceInline(admin.StackedInline):\n model = CustomSites\n\n\nclass WebsitePreferencesInstanceInline(admin.TabularInline):\n model = WebsitePreferences\n\n\nclass SiteAdmin(admin.ModelAdmin):\n list_filter = 'domain', 'name'\n inlines = [CustomSiteInstanceInline, WebsitePreferencesInstanceInline]\n\n\n<mask token>\nadmin.site.unregister(Site)\nadmin.site.register(Site, SiteAdmin)\nadmin.site.register(WebsiteData, WebsiteDataAdmin)\n",
"step-5": "from django.contrib import admin, messages\nfrom django.conf.urls import url\nfrom django.shortcuts import render\nfrom django.contrib.sites.models import Site\nfrom django.http import HttpResponseRedirect, HttpResponse\nfrom website_data.models import *\nfrom website_data.forms import *\nimport logging\n\n# Get an instance of a logger\nlogger = logging.getLogger(__name__)\n\nclass WebsiteDataAdmin(admin.ModelAdmin):\n\n # URLs overwriting to add new admin views (with auth check and without cache)\n def get_urls(self):\n urls = super(WebsiteDataAdmin, self).get_urls()\n my_urls = [\n # url(r'^edit-site/(?:(?P<site_id>\\d+)/)$', self.admin_site.admin_view(self.edit_site)),\n url(r'^create-defaults/$', self.admin_site.admin_view(self.create_defaults)),\n ]\n\n # return custom URLs with default URLs\n return my_urls + urls\n\n \"\"\"\n def edit_site(self, request, site_id):\n \"\"Function to select a site to edit\"\"\n\n WebsiteData_obj = WebsiteData()\n Site_obj = Site.objects.get(pk=site_id)\n\n if request.method == 'POST':\n form = EditTextSiteForm(request.POST)\n\n if form.is_valid():\n # TODO: salvo i valori delle relative chiavi\n WebsiteData_obj.set_all_keys_about_site(site_id=site_id, post=request.POST)\n\n # pagina di successo con i dati aggiornati precompilati\n messages.add_message(request, messages.SUCCESS, 'Dati salvati con successo.')\n return HttpResponseRedirect('/admin/website_data/websitedata/edit-site/' + str(site_id)) # Redirect after POST\n else:\n form = EditTextSiteForm() # An unbound form\n # precompilo la post con eventuali valori presenti\n request.POST = WebsiteData_obj.get_all_keys_about_site(site_domain=Site_obj.domain)\n # logger.info(\"chiavi salvate in db per il sito \" + str(site_id) + \": \" + str(request.POST))\n\n context = {\n 'form' : form,\n 'post': request.POST,\n 'title': \"Modifica informazioni sito: \" + str(Site_obj.domain),\n 'opts': self.model._meta,\n 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser,\n 'site_url': '/',\n }\n\n return render(request, 'admin/custom_view/edit_site.html', context)\n \"\"\"\n\n def create_defaults(self, request):\n \"\"\"Function to create default keys and themes\"\"\"\n ThemeKeys_obj = ThemeKeys()\n ThemeKeys_obj.create_default_keys()\n\n WebsitePreferenceKeys_obj = WebsitePreferenceKeys()\n WebsitePreferenceKeys_obj.create_default_keys()\n\n context = {\n 'title': \"Creazione chiavi e temi di default\",\n 'opts': self.model._meta,\n 'app_label': self.model._meta.app_label,\n 'has_permission': request.user.is_superuser,\n 'site_url': '/',\n }\n\n messages.add_message(request, messages.SUCCESS, 'Valori di default creati con successo.')\n\n return render(request, 'admin/custom_view/create_defaults.html', context)\n\n def get_model_perms(self, request):\n \"\"\"\n https://stackoverflow.com/questions/2431727/django-admin-hide-a-model\n\n Return empty perms dict thus hiding the model from admin index.\n Per far funzionare le custom view dell'app website_data ma nascondendo\n tutti i modelli, in questo modo gli url funzionano ma nell'admin non si\n vede nessun modello da modificare/aggiungere.\n \"\"\"\n return {}\n\nclass CustomSiteInstanceInline(admin.StackedInline):\n model = CustomSites\n\nclass WebsitePreferencesInstanceInline(admin.TabularInline):\n model = WebsitePreferences\n\n# Define a new Site admin\nclass SiteAdmin(admin.ModelAdmin):\n list_filter = ('domain', 'name')\n inlines = [CustomSiteInstanceInline, WebsitePreferencesInstanceInline]\n\n# TODO: pagine 
aggiuntive per l'admin (da usare solo per debug o manutenzione)\n\"\"\"\nadmin.site.register(Themes)\nadmin.site.register(ThemeKeys)\nadmin.site.register(WebsitePreferences)\nadmin.site.register(WebsitePreferenceKeys)\nadmin.site.register(CustomSites)\n\"\"\"\n\nadmin.site.unregister(Site)\nadmin.site.register(Site, SiteAdmin)\nadmin.site.register(WebsiteData, WebsiteDataAdmin)\n",
"step-ids": [
4,
10,
12,
14,
15
]
}
|
[
4,
10,
12,
14,
15
] |
from typing import List

import glm
import pxng

import OpenGL.GL as gl


class VertexArrayObject:
    def __init__(self, primitive):
        self._primitive = primitive
        self._buffers: List[pxng.BufferObject] = []
        self._indices = pxng.BufferObject(data_type=self.index_data_type,
                                          array_type=gl.GL_ELEMENT_ARRAY_BUFFER)
        self._vao = gl.glGenVertexArrays(1)

    def attach_buffer(self, vbo: pxng.BufferObject):
        self._buffers.append(vbo)
        return len(self._buffers) - 1

    def add_quad(self, p1, p2, p3, p4):
        i = self._buffers[0].index
        self._buffers[0].set_value(p1)
        self._buffers[0].set_value(p2)
        self._buffers[0].set_value(p3)
        self._buffers[0].set_value(p4)
        self._indices.set_value(glm.u16vec3(i, i + 1, i + 3))
        self._indices.set_value(glm.u16vec3(i + 1, i + 2, i + 3))

    def add_triangle(self, p1, p2, p3):
        i = self._buffers[0].index
        self._buffers[0].set_value(p1)
        self._buffers[0].set_value(p2)
        self._buffers[0].set_value(p3)
        self._indices.set_value(glm.u16vec3(i, i + 1, i + 2))

    def add_line(self, p1, p2):
        i = self._buffers[0].index
        self._buffers[0].set_value(p1)
        self._buffers[0].set_value(p2)
        self._indices.set_value(glm.u16vec2(i, i + 1))

    def add_point(self, p1):
        i = self._buffers[0].index
        self._buffers[0].set_value(p1)
        self._indices.set_value(glm.u16vec1(i))

    def set_colors(self, *args: glm.vec4, target=1):
        for c in args:
            self._buffers[target].set_value(c)

    def set_texture(self, *args: glm.vec2 or glm.uvec2, target=1):
        for c in args:
            self._buffers[target].set_value(c)

    def create(self):
        gl.glBindVertexArray(self._vao)

        for index, vbo in enumerate(self._buffers):
            vbo.bind(index)

        self._indices.bind(None)

    def reset(self):
        self._indices.reset()
        for vbo in self._buffers:
            vbo.reset()

    def draw(self):
        index_count = len(self._indices) * self.primitive_component_count
        gl.glDrawElements(self._primitive, index_count, gl.GL_UNSIGNED_SHORT, None)

    @property
    def index_data_type(self):
        if self._primitive == gl.GL_TRIANGLES:
            return glm.u16vec3
        elif self._primitive == gl.GL_LINES:
            return glm.u16vec2
        elif self._primitive == gl.GL_POINTS:
            return glm.u16vec1
        else:
            raise UserWarning(f'Unknown primitive type {self._primitive}')

    @property
    def primitive_component_count(self):
        if self._primitive == gl.GL_TRIANGLES:
            return 3
        elif self._primitive == gl.GL_LINES:
            return 2
        elif self._primitive == gl.GL_POINTS:
            return 1
        else:
            raise UserWarning(f'Unknown primitive type {self._primitive}')

    def bind(self):
        gl.glBindVertexArray(self._vao)
        if self._indices.bind(None):
            if any(vbo.changed for vbo in self._buffers):
                self.create()
            return True
        gl.glBindVertexArray(0)
        return False
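

# A minimal usage sketch of the class above, assuming an active OpenGL context
# and that pxng.BufferObject(data_type=glm.vec3) creates a vertex buffer
# (array_type presumably defaulting to GL_ARRAY_BUFFER); names and values here
# are illustrative, not part of the original module:
def _example_triangle():
    vao = VertexArrayObject(gl.GL_TRIANGLES)
    vao.attach_buffer(pxng.BufferObject(data_type=glm.vec3))  # positions land at buffer index 0
    vao.add_triangle(glm.vec3(0, 0, 0), glm.vec3(1, 0, 0), glm.vec3(0, 1, 0))
    vao.create()        # bind the buffers to the VAO once
    if vao.bind():      # re-uploads changed buffers, True when drawable
        vao.draw()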
|
normal
|
{
"blob_id": "7530c2c85f83d1714840ba97c1ec702f063658c5",
"index": 379,
"step-1": "<mask token>\n\n\nclass VertexArrayObject:\n\n def __init__(self, primitive):\n self._primitive = primitive\n self._buffers: List[pxng.BufferObject] = []\n self._indices = pxng.BufferObject(data_type=self.index_data_type,\n array_type=gl.GL_ELEMENT_ARRAY_BUFFER)\n self._vao = gl.glGenVertexArrays(1)\n <mask token>\n\n def add_quad(self, p1, p2, p3, p4):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._buffers[0].set_value(p4)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 3))\n self._indices.set_value(glm.u16vec3(i + 1, i + 2, i + 3))\n\n def add_triangle(self, p1, p2, p3):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 2))\n\n def add_line(self, p1, p2):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._indices.set_value(glm.u16vec2(i, i + 1))\n <mask token>\n\n def set_colors(self, *args: glm.vec4, target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def set_texture(self, *args: (glm.vec2 or glm.uvec2), target=1):\n for c in args:\n self._buffers[target].set_value(c)\n <mask token>\n <mask token>\n\n def draw(self):\n index_count = len(self._indices) * self.primitive_component_count\n gl.glDrawElements(self._primitive, index_count, gl.\n GL_UNSIGNED_SHORT, None)\n <mask token>\n\n @property\n def primitive_component_count(self):\n if self._primitive == gl.GL_TRIANGLES:\n return 3\n elif self._primitive == gl.GL_LINES:\n return 2\n elif self._primitive == gl.GL_POINTS:\n return 1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass VertexArrayObject:\n\n def __init__(self, primitive):\n self._primitive = primitive\n self._buffers: List[pxng.BufferObject] = []\n self._indices = pxng.BufferObject(data_type=self.index_data_type,\n array_type=gl.GL_ELEMENT_ARRAY_BUFFER)\n self._vao = gl.glGenVertexArrays(1)\n <mask token>\n\n def add_quad(self, p1, p2, p3, p4):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._buffers[0].set_value(p4)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 3))\n self._indices.set_value(glm.u16vec3(i + 1, i + 2, i + 3))\n\n def add_triangle(self, p1, p2, p3):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 2))\n\n def add_line(self, p1, p2):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._indices.set_value(glm.u16vec2(i, i + 1))\n <mask token>\n\n def set_colors(self, *args: glm.vec4, target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def set_texture(self, *args: (glm.vec2 or glm.uvec2), target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def create(self):\n gl.glBindVertexArray(self._vao)\n for index, vbo in enumerate(self._buffers):\n vbo.bind(index)\n self._indices.bind(None)\n <mask token>\n\n def draw(self):\n index_count = len(self._indices) * self.primitive_component_count\n gl.glDrawElements(self._primitive, index_count, gl.\n GL_UNSIGNED_SHORT, None)\n\n @property\n def index_data_type(self):\n if self._primitive == gl.GL_TRIANGLES:\n return glm.u16vec3\n elif self._primitive == gl.GL_LINES:\n return glm.u16vec2\n elif self._primitive == gl.GL_POINTS:\n return glm.u16vec1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n\n @property\n def primitive_component_count(self):\n if self._primitive == gl.GL_TRIANGLES:\n return 3\n elif self._primitive == gl.GL_LINES:\n return 2\n elif self._primitive == gl.GL_POINTS:\n return 1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass VertexArrayObject:\n\n def __init__(self, primitive):\n self._primitive = primitive\n self._buffers: List[pxng.BufferObject] = []\n self._indices = pxng.BufferObject(data_type=self.index_data_type,\n array_type=gl.GL_ELEMENT_ARRAY_BUFFER)\n self._vao = gl.glGenVertexArrays(1)\n <mask token>\n\n def add_quad(self, p1, p2, p3, p4):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._buffers[0].set_value(p4)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 3))\n self._indices.set_value(glm.u16vec3(i + 1, i + 2, i + 3))\n\n def add_triangle(self, p1, p2, p3):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 2))\n\n def add_line(self, p1, p2):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._indices.set_value(glm.u16vec2(i, i + 1))\n <mask token>\n\n def set_colors(self, *args: glm.vec4, target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def set_texture(self, *args: (glm.vec2 or glm.uvec2), target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def create(self):\n gl.glBindVertexArray(self._vao)\n for index, vbo in enumerate(self._buffers):\n vbo.bind(index)\n self._indices.bind(None)\n <mask token>\n\n def draw(self):\n index_count = len(self._indices) * self.primitive_component_count\n gl.glDrawElements(self._primitive, index_count, gl.\n GL_UNSIGNED_SHORT, None)\n\n @property\n def index_data_type(self):\n if self._primitive == gl.GL_TRIANGLES:\n return glm.u16vec3\n elif self._primitive == gl.GL_LINES:\n return glm.u16vec2\n elif self._primitive == gl.GL_POINTS:\n return glm.u16vec1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n\n @property\n def primitive_component_count(self):\n if self._primitive == gl.GL_TRIANGLES:\n return 3\n elif self._primitive == gl.GL_LINES:\n return 2\n elif self._primitive == gl.GL_POINTS:\n return 1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n\n def bind(self):\n gl.glBindVertexArray(self._vao)\n if self._indices.bind(None):\n if any(vbo.changed for vbo in self._buffers):\n self.create()\n return True\n gl.glBindVertexArray(0)\n return False\n",
"step-4": "<mask token>\n\n\nclass VertexArrayObject:\n\n def __init__(self, primitive):\n self._primitive = primitive\n self._buffers: List[pxng.BufferObject] = []\n self._indices = pxng.BufferObject(data_type=self.index_data_type,\n array_type=gl.GL_ELEMENT_ARRAY_BUFFER)\n self._vao = gl.glGenVertexArrays(1)\n <mask token>\n\n def add_quad(self, p1, p2, p3, p4):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._buffers[0].set_value(p4)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 3))\n self._indices.set_value(glm.u16vec3(i + 1, i + 2, i + 3))\n\n def add_triangle(self, p1, p2, p3):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 2))\n\n def add_line(self, p1, p2):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._indices.set_value(glm.u16vec2(i, i + 1))\n <mask token>\n\n def set_colors(self, *args: glm.vec4, target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def set_texture(self, *args: (glm.vec2 or glm.uvec2), target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def create(self):\n gl.glBindVertexArray(self._vao)\n for index, vbo in enumerate(self._buffers):\n vbo.bind(index)\n self._indices.bind(None)\n\n def reset(self):\n self._indices.reset()\n for vbo in self._buffers:\n vbo.reset()\n\n def draw(self):\n index_count = len(self._indices) * self.primitive_component_count\n gl.glDrawElements(self._primitive, index_count, gl.\n GL_UNSIGNED_SHORT, None)\n\n @property\n def index_data_type(self):\n if self._primitive == gl.GL_TRIANGLES:\n return glm.u16vec3\n elif self._primitive == gl.GL_LINES:\n return glm.u16vec2\n elif self._primitive == gl.GL_POINTS:\n return glm.u16vec1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n\n @property\n def primitive_component_count(self):\n if self._primitive == gl.GL_TRIANGLES:\n return 3\n elif self._primitive == gl.GL_LINES:\n return 2\n elif self._primitive == gl.GL_POINTS:\n return 1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n\n def bind(self):\n gl.glBindVertexArray(self._vao)\n if self._indices.bind(None):\n if any(vbo.changed for vbo in self._buffers):\n self.create()\n return True\n gl.glBindVertexArray(0)\n return False\n",
"step-5": "from typing import List\n\nimport glm\nimport pxng\n\nimport OpenGL.GL as gl\n\n\nclass VertexArrayObject:\n def __init__(self, primitive):\n self._primitive = primitive\n self._buffers: List[pxng.BufferObject] = []\n self._indices = pxng.BufferObject(data_type=self.index_data_type,\n array_type=gl.GL_ELEMENT_ARRAY_BUFFER)\n self._vao = gl.glGenVertexArrays(1)\n\n def attach_buffer(self, vbo: pxng.BufferObject):\n self._buffers.append(vbo)\n return len(self._buffers) - 1\n\n def add_quad(self, p1, p2, p3, p4):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._buffers[0].set_value(p4)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 3))\n self._indices.set_value(glm.u16vec3(i + 1, i + 2, i + 3))\n\n def add_triangle(self, p1, p2, p3):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._buffers[0].set_value(p3)\n self._indices.set_value(glm.u16vec3(i, i + 1, i + 2))\n\n def add_line(self, p1, p2):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._buffers[0].set_value(p2)\n self._indices.set_value(glm.u16vec2(i, i + 1))\n\n def add_point(self, p1):\n i = self._buffers[0].index\n self._buffers[0].set_value(p1)\n self._indices.set_value(glm.u16vec1(i))\n\n def set_colors(self, *args: glm.vec4, target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def set_texture(self, *args: glm.vec2 or glm.uvec2, target=1):\n for c in args:\n self._buffers[target].set_value(c)\n\n def create(self):\n gl.glBindVertexArray(self._vao)\n\n for index, vbo in enumerate(self._buffers):\n vbo.bind(index)\n\n self._indices.bind(None)\n\n def reset(self):\n self._indices.reset()\n for vbo in self._buffers:\n vbo.reset()\n\n def draw(self):\n index_count = len(self._indices) * self.primitive_component_count\n gl.glDrawElements(self._primitive, index_count, gl.GL_UNSIGNED_SHORT, None)\n\n @property\n def index_data_type(self):\n if self._primitive == gl.GL_TRIANGLES:\n return glm.u16vec3\n elif self._primitive == gl.GL_LINES:\n return glm.u16vec2\n elif self._primitive == gl.GL_POINTS:\n return glm.u16vec1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n\n @property\n def primitive_component_count(self):\n if self._primitive == gl.GL_TRIANGLES:\n return 3\n elif self._primitive == gl.GL_LINES:\n return 2\n elif self._primitive == gl.GL_POINTS:\n return 1\n else:\n raise UserWarning(f'Unknown primitive type {self._primitive}')\n\n def bind(self):\n gl.glBindVertexArray(self._vao)\n if self._indices.bind(None):\n if any(vbo.changed for vbo in self._buffers):\n self.create()\n return True\n gl.glBindVertexArray(0)\n return False\n",
"step-ids": [
9,
11,
12,
13,
17
]
}
|
[
9,
11,
12,
13,
17
] |
#!/usr/bin/env python3
#
# Display all tags in the specified file for neoview.
# Author: Andrew Pyatkov <[email protected]>
# License: MIT
#
"""
Display all tags in the specified file for neoview.
Output: {file_name}\t{tag_address}\t{displayable_tag_info}
"""
import argparse
#import os
#import re
import subprocess
# Parse command line args.
parser = argparse.ArgumentParser()
parser.add_argument("file", help="File name to display tags from")
args = parser.parse_args()
filename = args.file
# Colors for the output, see for more info:
# https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit
COLOR_TAGTYPE = '\033[1;35m'
COLOR_TAGNAME = ''
COLOR_COMMENT = '\033[0;32m'
COLOR_BAR = '\033[0;37m'
COLOR_RESET = '\033[m'
# Contains lists of [file_name, tag_address, tag_name, comment].
# 'file_name' is relative to the current directory.
# 'tag_address' can be a number or a "/^line$/".
tags = []
# Max length of a tag name.
max_tag_len = 0
cmd = 'ctags -f - --excmd=number %s' % filename
result = subprocess.check_output(cmd, shell=True)
out = result.decode("utf-8", errors="ignore").rstrip().split("\n")
def displayable_info(tagname, comment):
    cs = comment.split("\t", 1)
    return ('{}{:<' + str(max_tag_len) + '}{} {}|{}{}{}|{} {}{}{}').\
        format(
        COLOR_TAGNAME, tagname, COLOR_RESET,
        COLOR_BAR, COLOR_TAGTYPE, cs[0], COLOR_BAR, COLOR_RESET,
        COLOR_COMMENT, cs[1] if len(cs) == 2 else "", COLOR_RESET)

for l in out:
    # t[0] - tag name, t[1] - file name, t[2] - tag address and comment
    t = l.split("\t", 2)
    max_tag_len = max(max_tag_len, len(t[0]))
    # info[0] - tag address, info[1] - comment
    info = t[2].split(';"')
    tags.append([t[1], info[0], t[0], info[1].strip()])

for t in tags:
    print('%s\t%s\t%s' %
          (t[0], t[1], displayable_info(t[2], t[3])))
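
# A hypothetical invocation of the script above (script and source file names
# are made up; ANSI colors omitted). Fields are tab-separated, matching the
# {file_name}\t{tag_address}\t{displayable_tag_info} shape neoview consumes:
#
#     $ python neoview_tags.py example.py
#     example.py    42    main      |f| def main():
#     example.py    57    Parser    |c| class Parser: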
|
normal
|
{
"blob_id": "b220cacc2530ca62b5599a9c1894e979bcfd5109",
"index": 9633,
"step-1": "<mask token>\n\n\ndef displayable_info(tagname, comment):\n cs = comment.split('\\t', 1)\n return ('{}{:<' + str(max_tag_len) + '}{} {}|{}{}{}|{} {}{}{}').format(\n COLOR_TAGNAME, tagname, COLOR_RESET, COLOR_BAR, COLOR_TAGTYPE, cs[0\n ], COLOR_BAR, COLOR_RESET, COLOR_COMMENT, cs[1] if len(cs) == 2 else\n '', COLOR_RESET)\n\n\n<mask token>\n",
"step-2": "<mask token>\nparser.add_argument('file', help='File name to display tags from')\n<mask token>\n\n\ndef displayable_info(tagname, comment):\n cs = comment.split('\\t', 1)\n return ('{}{:<' + str(max_tag_len) + '}{} {}|{}{}{}|{} {}{}{}').format(\n COLOR_TAGNAME, tagname, COLOR_RESET, COLOR_BAR, COLOR_TAGTYPE, cs[0\n ], COLOR_BAR, COLOR_RESET, COLOR_COMMENT, cs[1] if len(cs) == 2 else\n '', COLOR_RESET)\n\n\nfor l in out:\n t = l.split('\\t', 2)\n max_tag_len = max(max_tag_len, len(t[0]))\n info = t[2].split(';\"')\n tags.append([t[1], info[0], t[0], info[1].strip()])\nfor t in tags:\n print('%s\\t%s\\t%s' % (t[0], t[1], displayable_info(t[2], t[3])))\n",
"step-3": "<mask token>\nparser = argparse.ArgumentParser()\nparser.add_argument('file', help='File name to display tags from')\nargs = parser.parse_args()\nfilename = args.file\nCOLOR_TAGTYPE = '\\x1b[1;35m'\nCOLOR_TAGNAME = ''\nCOLOR_COMMENT = '\\x1b[0;32m'\nCOLOR_BAR = '\\x1b[0;37m'\nCOLOR_RESET = '\\x1b[m'\ntags = []\nmax_tag_len = 0\ncmd = 'ctags -f - --excmd=number %s' % filename\nresult = subprocess.check_output(cmd, shell=True)\nout = result.decode('utf-8', errors='ignore').rstrip().split('\\n')\n\n\ndef displayable_info(tagname, comment):\n cs = comment.split('\\t', 1)\n return ('{}{:<' + str(max_tag_len) + '}{} {}|{}{}{}|{} {}{}{}').format(\n COLOR_TAGNAME, tagname, COLOR_RESET, COLOR_BAR, COLOR_TAGTYPE, cs[0\n ], COLOR_BAR, COLOR_RESET, COLOR_COMMENT, cs[1] if len(cs) == 2 else\n '', COLOR_RESET)\n\n\nfor l in out:\n t = l.split('\\t', 2)\n max_tag_len = max(max_tag_len, len(t[0]))\n info = t[2].split(';\"')\n tags.append([t[1], info[0], t[0], info[1].strip()])\nfor t in tags:\n print('%s\\t%s\\t%s' % (t[0], t[1], displayable_info(t[2], t[3])))\n",
"step-4": "<mask token>\nimport argparse\nimport subprocess\nparser = argparse.ArgumentParser()\nparser.add_argument('file', help='File name to display tags from')\nargs = parser.parse_args()\nfilename = args.file\nCOLOR_TAGTYPE = '\\x1b[1;35m'\nCOLOR_TAGNAME = ''\nCOLOR_COMMENT = '\\x1b[0;32m'\nCOLOR_BAR = '\\x1b[0;37m'\nCOLOR_RESET = '\\x1b[m'\ntags = []\nmax_tag_len = 0\ncmd = 'ctags -f - --excmd=number %s' % filename\nresult = subprocess.check_output(cmd, shell=True)\nout = result.decode('utf-8', errors='ignore').rstrip().split('\\n')\n\n\ndef displayable_info(tagname, comment):\n cs = comment.split('\\t', 1)\n return ('{}{:<' + str(max_tag_len) + '}{} {}|{}{}{}|{} {}{}{}').format(\n COLOR_TAGNAME, tagname, COLOR_RESET, COLOR_BAR, COLOR_TAGTYPE, cs[0\n ], COLOR_BAR, COLOR_RESET, COLOR_COMMENT, cs[1] if len(cs) == 2 else\n '', COLOR_RESET)\n\n\nfor l in out:\n t = l.split('\\t', 2)\n max_tag_len = max(max_tag_len, len(t[0]))\n info = t[2].split(';\"')\n tags.append([t[1], info[0], t[0], info[1].strip()])\nfor t in tags:\n print('%s\\t%s\\t%s' % (t[0], t[1], displayable_info(t[2], t[3])))\n",
"step-5": "#!/usr/bin/env python3\n#\n# Display all tags in the specified file for neoview.\n# Author: Andrew Pyatkov <[email protected]>\n# License: MIT\n#\n\"\"\"\nDisplay all tags in the specified file for neoview.\nOutput: {file_name}\\t{tag_address}\\t{displayable_tag_info}\n\"\"\"\nimport argparse\n#import os\n#import re\nimport subprocess\n\n# Parse command line args.\nparser = argparse.ArgumentParser()\nparser.add_argument(\"file\", help=\"File name to display tags from\")\nargs = parser.parse_args()\n\nfilename = args.file\n\n# Colors for the output, see for more info:\n# https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit\nCOLOR_TAGTYPE = '\\033[1;35m'\nCOLOR_TAGNAME = ''\nCOLOR_COMMENT = '\\033[0;32m'\nCOLOR_BAR = '\\033[0;37m'\nCOLOR_RESET = '\\033[m'\n\n# Contains lists of [file_name, tag_address, tag_name, comment].\n# 'file_name' is relative to the current directory.\n# 'tag_address' can be a number or a \"/^line$/\".\ntags = []\n\n# Max length of a tag name.\nmax_tag_len = 0\n\ncmd = 'ctags -f - --excmd=number %s' % filename\n\nresult = subprocess.check_output(cmd, shell=True)\nout = result.decode(\"utf-8\", errors=\"ignore\").rstrip().split(\"\\n\")\n\n\ndef displayable_info(tagname, comment):\n cs = comment.split(\"\\t\", 1)\n return ('{}{:<' + str(max_tag_len) + '}{} {}|{}{}{}|{} {}{}{}').\\\n format(\n COLOR_TAGNAME, tagname, COLOR_RESET,\n COLOR_BAR, COLOR_TAGTYPE, cs[0], COLOR_BAR, COLOR_RESET,\n COLOR_COMMENT, cs[1] if len(cs) == 2 else \"\", COLOR_RESET)\n\nfor l in out:\n # t[0] - tag name, t[1] - file name, t[2] - tag address and comment\n t = l.split(\"\\t\", 2)\n max_tag_len = max(max_tag_len, len(t[0]))\n # info[0] - tag address, info[1] - comment\n info = t[2].split(';\"')\n tags.append([t[1], info[0], t[0], info[1].strip()])\n\nfor t in tags:\n print('%s\\t%s\\t%s' %\n (t[0], t[1], displayable_info(t[2], t[3])))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from Graph import *
from PrioQueue import *
from GShortestPath import *
from GSpanTree import *
from User import *

infinity = float("inf")


# Keyword-based place lookup: once a table of names keyed by some attribute
# exists, keyword matching resolves the query. The matches are returned as a
# list that can be handed straight to the output end.
def find_by_word(lst, word):
    # String-matching helper: word is the user's input, lst is the collection
    # to scan. Ideally the results would be kept as a priority queue.
    # If no exact match is found, build slices of word (substrings shorter than
    # word, taken at every position) and use them to search for the intended
    # name. Since scenic-spot and place names are usually short, even the high
    # time cost here is acceptable and will not cause trouble.
    ans = []
    for x in lst:
        if word == x:
            ans.append(x)
    if len(word) > 20:
        raise ValueError("in find_by_word, we don't think it's possible for a "
                         "city or a town to own a name longer than 20")
    # If the user's input is in the full set of place names, assume it was
    # typed correctly.
    if ans != []:
        return ans
    slices = []
    for i in range(len(word)):
        # For efficiency, the inner loop is arranged so the slices are ordered
        # from short to long, which puts the most similar candidates first.
        for j in range(0, len(word) - i + 1):
            slices.append(word[j:j + i])
    for x in lst:
        for i in range(1, len(word)):
            if slices[-i] in x:
                ans.append(x)
    return ans
# Category labels: historical culture, modern city, mountains, seaside, general.
categories = {"历史文化", "现代都市", "山区", "海景", "综合"}
infnum = float("inf")


class web:
    # land_list is a plain list; the usual list methods apply.

    def __init__(self, lnum=0, land_list=None, graph_money=None, graph_time=None, graph_line=None):
        # None defaults avoid sharing one mutable default object between instances.
        self.graph_money = graph_money if graph_money is not None else GraphAL()
        self.graph_time = graph_time if graph_time is not None else GraphAL()
        self.graph_line = graph_line if graph_line is not None else GraphAL()
        self.lnum = lnum
        self.land_list = land_list if land_list is not None else []

    def is_empty(self):
        return self.lnum == 0

    # Collect the names of all landscapes into a list.
    # self.land_list is a list whose elements are landscape objects.
    def _get_name(self):
        if self.is_empty():
            raise WebLandsError("in 'get_all_position'")
        namee = []
        for x in self.land_list:
            namee.append(x.name)
        return namee

    # Get the list position of a landscape.
    def lst_pos(self, land):
        return self.land_list.index(land)

    def _get_position(self):
        if self.is_empty():
            raise WebLandsError("in 'get_all_position'")
        positionn = []
        for x in self.land_list:
            positionn.append(x.position)
        return positionn

    def add_land(self, landscape):
        self.land_list.append(landscape)
        self.graph_money.add_vertex()
        self.graph_time.add_vertex()
        self.graph_line.add_vertex()
        self.lnum += 1

    # If money, time or line is left unset, the two landscapes simply have no
    # connecting edge.
    def set_all(self, land1, land2, money=infnum, time=infnum, line=1):
        self.graph_money.add_edge(self.land_list.index(land1),
                                  self.land_list.index(land2), money)
        self.graph_time.add_edge(self.land_list.index(land1),
                                 self.land_list.index(land2), time)
        self.graph_line.add_edge(self.land_list.index(land1),
                                 self.land_list.index(land2), line)

    # The setters/getters below back the Dijkstra-based shortest-path queries;
    # they work identically on the time, money and line-length adjacency graphs.
    def set_money(self, land1, land2, money):
        self.graph_money.add_edge(self.land_list.index(land1),
                                  self.land_list.index(land2), money)

    def get_money(self, land1, land2):
        a = self.graph_money.get_edge(self.land_list.index(land1),
                                      self.land_list.index(land2))
        return a

    def set_time(self, land1, land2, time):
        self.graph_time.add_edge(self.land_list.index(land1),
                                 self.land_list.index(land2), time)

    def get_time(self, land1, land2):
        a = self.graph_time.get_edge(self.land_list.index(land1),
                                     self.land_list.index(land2))
        return a

    def set_line(self, land1, land2, line):
        self.graph_line.add_edge(self.land_list.index(land1),
                                 self.land_list.index(land2), line)

    def get_line(self, land1, land2):
        a = self.graph_line.get_edge(self.land_list.index(land1),
                                     self.land_list.index(land2))
        return a
# shortest_* helpers begin


def shortest_money(web, land1, land2):
    vi = web.lst_pos(land1)
    vj = web.lst_pos(land2)
    if vi == vj:
        raise ValueError("in shortest_money, if the beginning is the same as "
                         "the ending, you don't have to pay anything")
    path = dijkstra_shortest_paths(web.graph_money, vi)
    path_list = [vi]
    while vi != path[vj][0]:
        path_list.append(path[vj][0])
        vi = path[vj][0]
    return path_list, path[vj][1]


def shortest_money_str(web, land1, land2):
    str_ = ""
    path, pay = shortest_money(web, land1, land2)
    for i in range(len(path)):
        str_ += str(web.land_list[path[i]].name)
        str_ += "->"
    str_ += land2.name
    return "shortest path by money:", str_, "total money cost:", pay


def shortest_time(web, land1, land2):
    vi = web.lst_pos(land1)
    vj = web.lst_pos(land2)
    if vi == vj:
        raise ValueError("in shortest_time, if the beginning is the same as "
                         "the ending, you don't have to pay anything")
    path = dijkstra_shortest_paths(web.graph_time, vi)
    path_list = [vi]
    while vi != vj:
        path_list.append(path[vj][0])
        vi = path[vj][0]
    return path_list, path[vj][1]


def shortest_time_str(web, land1, land2):
    str_ = ""
    path, pay = shortest_time(web, land1, land2)
    for i in range(len(path)):
        str_ += str(path[i])
    return "shortest path by time:", str_, "total time cost:", pay


def shortest_line(web, land1, land2):
    vi = web.lst_pos(land1)
    vj = web.lst_pos(land2)
    if vi == vj:
        raise ValueError("in shortest_line, if the beginning is the same as "
                         "the ending, you don't have to pay anything")
    path = dijkstra_shortest_paths(web.graph_line, vi)
    path_list = [vi]
    while vi != vj:
        path_list.append(path[vj][0])
        vi = path[vj][0]
    return path_list, path[vj][1]


def shortest_line_str(web, land1, land2):
    str_ = ""
    path, pay = shortest_line(web, land1, land2)
    for i in range(len(path)):
        str_ += str(path[i])
    return "shortest path by line:", str_, "total line cost:", pay

# shortest_* helpers end
class landscape:  # a landscape is one scenic spot; its index is its position in the web's list
    def __init__(self, name, position, category=None, hot=0):
        # position is a number identifying the spot
        self._name = name
        self._position = position
        self._category = category
        self._hot = hot

    @property
    def position(self):
        return self._position

    @property
    def category(self):
        return self._category

    @property
    def name(self):
        return self._name

    @property
    def hot(self):
        return self._hot

    def set_category(self, sorts):
        if sorts not in categories:
            raise ValueError("in set_category, we do not have {}".format(sorts))
        self._category = sorts
# For the multi-target problem, first build a web holding all target landscapes
# with the existing methods; then a Prim-style algorithm applies, since the
# multi-target tour is essentially a minimum-spanning-tree problem.


def multi_aim_solve(land_list):
    sub_web = web()
    for x in land_list:
        sub_web.add_land(x)
    lanst = sub_web.land_list.copy()
    for x in lanst:
        for y in lanst:
            if x == y:
                continue
            vi = sub_web.lst_pos(x)
            vj = sub_web.lst_pos(y)
            # TODO: connect vi and vj in sub_web and run the Prim-based
            # spanning-tree routine over it.


a, b, c = Edges([0, 2, 4])
lst = ["东方明珠", "西湖", "迪士尼"]  # Oriental Pearl Tower, West Lake, Disneyland
china = web(3, lst, a, b, c)
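

# A minimal end-to-end sketch of the intended API above; the two spots and all
# edge weights are made up, and it assumes GraphAL() starts empty so add_land
# can grow the three graphs in step:
def _example_trip():
    trip = web()
    pearl = landscape("东方明珠", 0, hot=5)  # Oriental Pearl Tower
    lake = landscape("西湖", 1, hot=4)       # West Lake
    trip.add_land(pearl)
    trip.add_land(lake)
    trip.set_all(pearl, lake, money=50, time=3, line=180)
    print(shortest_money_str(trip, pearl, lake))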
|
normal
|
{
"blob_id": "b5ec6e0fc4239a53a882b455a113eaac4db6cef5",
"index": 2331,
"step-1": "<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n <mask token>\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n <mask token>\n <mask token>\n\n def get_money(self, land1, land2):\n a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have {}'.format(sorts))\n self.category = sorts\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n\n def _get_name(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n namee = []\n for x in self.land_list():\n namee.append(x.name)\n return namee\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\n graph_money.add_edge(self.land_list().index(land1), self.land_list(\n ).index(land2), money)\n graph_time.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), time)\n graph_line.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), line)\n\n def set_money(self, land1, land2, money):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), money)\n\n def get_money(self, land1, land2):\n a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n\n def get_time(self, land1, land2):\n a = self.graph_time.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_line(self, land1, land2, line):\n self.graph_line.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), line)\n\n def get_line(self, land1, land2):\n a = self.graph_line.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n\n<mask token>\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have {}'.format(sorts))\n self.category = sorts\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef find_by_word(lst, word):\n ans = []\n for x in lst:\n if word == x:\n ans.append(x)\n if len(word) > 20:\n raise ValuError(\n \"in find_by_word, we don't think it's possible for a city or a town to own a name longer than 20\"\n )\n if ans != []:\n return ans\n slices = []\n for i in range(len(word)):\n for j in range(0, len(word) - i + 1):\n slices.append(word[j:j + i])\n for x in lst:\n for i in range(1, len(word)):\n if slices[-i] in x:\n ans.append(x)\n return ans\n\n\n<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n\n def _get_name(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n namee = []\n for x in self.land_list():\n namee.append(x.name)\n return namee\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\n graph_money.add_edge(self.land_list().index(land1), self.land_list(\n ).index(land2), money)\n graph_time.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), time)\n graph_line.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), line)\n\n def set_money(self, land1, land2, money):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), money)\n\n def get_money(self, land1, land2):\n a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n\n def get_time(self, land1, land2):\n a = self.graph_time.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_line(self, land1, land2, line):\n self.graph_line.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), line)\n\n def get_line(self, land1, land2):\n a = self.graph_line.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n\ndef shortest_money(web, land1, land2):\n vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_money, if the begining is the same as the ending, you don't have to pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_money, vi)\n path_list = [vi]\n while vi != path[vj][0]:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\n<mask token>\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have 
{}'.format(sorts))\n self.category = sorts\n\n\ndef muti_aim_solve(land_list):\n sub_web = web()\n for x in land_list:\n sub_web.add_land(x)\n lanst = web.land_list().copy()\n for x in lanst:\n for y in lanst:\n if x == y:\n continue\n vi = lst_pos(web, x)\n vj = lst_pos(web, y)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef find_by_word(lst, word):\n ans = []\n for x in lst:\n if word == x:\n ans.append(x)\n if len(word) > 20:\n raise ValuError(\n \"in find_by_word, we don't think it's possible for a city or a town to own a name longer than 20\"\n )\n if ans != []:\n return ans\n slices = []\n for i in range(len(word)):\n for j in range(0, len(word) - i + 1):\n slices.append(word[j:j + i])\n for x in lst:\n for i in range(1, len(word)):\n if slices[-i] in x:\n ans.append(x)\n return ans\n\n\n<mask token>\n\n\nclass web:\n\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(),\n graph_time=GraphAL(), graph_line=GraphAL()):\n self.graph_money = graph_money\n self.graph_time = graph_time\n self.graph_line = graph_line\n self.lnum = lnum\n self.land_list = land_list\n\n def is_empty(self):\n return self.lnum == 0\n\n def _get_name(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n namee = []\n for x in self.land_list():\n namee.append(x.name)\n return namee\n\n def lst_pos(self, land):\n return self.land_list.index(land)\n\n def _get_position(self):\n if self.is_empty():\n raise WebLandsError(\"in 'get_all_position'\")\n positionn = []\n for x in self.land_list():\n positionn.append(x.position)\n return positionn\n\n def add_land(self, landscape):\n self.land_list.append(landscape)\n self.graph_money.add_vertex()\n self.graph_time.add_vertex()\n self.graph_line.add_vertex()\n self.lnum += 1\n\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\n graph_money.add_edge(self.land_list().index(land1), self.land_list(\n ).index(land2), money)\n graph_time.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), time)\n graph_line.add_edge(self.land_list().index(land1), self.land_list()\n .index(land2), line)\n\n def set_money(self, land1, land2, money):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), money)\n\n def get_money(self, land1, land2):\n a = self.graph_money.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_time(self, land1, land2, time):\n self.graph_money.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), time)\n\n def get_time(self, land1, land2):\n a = self.graph_time.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n def set_line(self, land1, land2, line):\n self.graph_line.add_edge(self.land_list.index(land1), self.\n land_list.index(land2), line)\n\n def get_line(self, land1, land2):\n a = self.graph_line.get_edge(self.land_list.index(land1), self.\n land_list.index(land2))\n return a\n\n\ndef shortest_money(web, land1, land2):\n vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_money, if the begining is the same as the ending, you don't have to pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_money, vi)\n path_list = [vi]\n while vi != path[vj][0]:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\ndef shortest_money_str(web, land1, land2):\n str_ = ''\n path, pay = shortest_money(web, land1, land2)\n for i in range(len(path)):\n str_ += str(web.land_list[path[i]].name)\n str_ += '->'\n str_ += land2.name\n return '所求的最短路money路径为', str_, '总money代价为', pay\n\n\ndef shortest_time(web, land1, land2):\n vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_time, if the begining is the same as the ending, you don't have to 
pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_time(), vi)\n path_list = [vi]\n while vi != vj:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\ndef shortest_time_str(web, land1, land2):\n str_ = ''\n path, pay = shortest_time(web, land1, land2)\n for i in range(len(path)):\n str_ += str(path[i])\n return '所求的最短路time路径为', str_, '总time代价为', pay\n\n\ndef shortest_line(web, land1, land2):\n vi = web.lst_pos(land1)\n vj = web.lst_pos(land2)\n if vi == vj:\n raise ValuError(\n \"in shortest_line, if the begining is the same as the ending, you don't have to pay anything\"\n )\n path = dijkstra_shortest_paths(web.graph_line(), vi)\n path_list = [vi]\n while vi != vj:\n path_list.append(path[vj][0])\n vi = path[vj][0]\n return path_list, path[vj][1]\n\n\ndef shortest_time_str(web, land1, land2):\n str_ = ''\n path, pay = shortest_line(web, land1, land2)\n for i in range(len(path)):\n str_ += str(path[i])\n return '所求的最短路line路径为', str_, '总line代价为', pay\n\n\nclass landscape:\n\n def __init__(self, name, position, category=None, hot=0):\n self.name = name\n self.position = position\n self.category = category\n self.hot = hot\n\n def position(self):\n return self._position\n\n def category(self):\n return self._category\n\n def name(self):\n return self._name\n\n def hot(self):\n return hot\n\n def set_category(self, sorts):\n if sorts not in categorys:\n raise ValuError('in set_category, we do not have {}'.format(sorts))\n self.category = sorts\n\n\ndef muti_aim_solve(land_list):\n sub_web = web()\n for x in land_list:\n sub_web.add_land(x)\n lanst = web.land_list().copy()\n for x in lanst:\n for y in lanst:\n if x == y:\n continue\n vi = lst_pos(web, x)\n vj = lst_pos(web, y)\n\n\n<mask token>\n",
"step-5": "from Graph import *\r\nfrom PrioQueue import *\r\nfrom GShortestPath import *\r\nfrom GSpanTree import *\r\nfrom User import *\r\ninfinity = float(\"inf\")\r\n\r\n\r\n# 这是根据关键字找地点的方法,已经形成了某个依据属性的表后,通过关键词匹配来解决问题\r\n# 最终输出一个yield出的迭代器,将其list化后就可以向末端输出了\r\ndef find_by_word(lst, word):\r\n # 这个是字符串匹配函数,word是客户输入,lst是循环的东西\r\n # 最好排成优先队列\r\n # 若没找到,我们可以造一个关于word的任意位置的切片,长度比word短,由此来寻找想要的名称\r\n # 由于景点,地名的长度一般不长,所以即使这里的时间代价极高,我们也可以保证这样做不会引发混乱\r\n ans = []\r\n for x in lst:\r\n if word == x:\r\n ans.append(x)\r\n if len(word) > 20:\r\n raise ValuError(\"in find_by_word, we don't think it's possible for a city or a town\\\r\n to own a name longer than 20\")\r\n # 如果客户输入的地名在地名总集中,我们有理由相信他没有输错\r\n if ans != []:\r\n return ans\r\n slices = []\r\n for i in range(len(word)):\r\n # 这里为了保证效率,我们可以通过控制内部循环来使得表中名字串长度从小到大排列\r\n # 并且这样排出来的结果是相似度高的在前面\r\n for j in range(0, len(word) - i + 1):\r\n slices.append(word[j:j + i])\r\n for x in lst:\r\n for i in range(1, len(word)):\r\n if slices[-i] in x:\r\n ans.append(x)\r\n return ans\r\n\r\n\r\ncategorys = {\"历史文化\", \"现代都市\", \"山区\", \"海景\", \"综合\"}\r\ninfnum = float(\"inf\")\r\n\r\n\r\nclass web:\r\n # land_list是一个list对象,适用相应方法\r\n\r\n def __init__(self, lnum=0, land_list=[], graph_money=GraphAL(), graph_time=GraphAL(), graph_line=GraphAL()):\r\n self.graph_money = graph_money\r\n self.graph_time = graph_time\r\n self.graph_line = graph_line\r\n self.lnum = lnum\r\n self.land_list = land_list\r\n\r\n def is_empty(self):\r\n return self.lnum == 0\r\n # 获得所有景点名称,用list储存\r\n # self._land_list是以landscape为元素的表\r\n\r\n def _get_name(self):\r\n if self.is_empty():\r\n raise WebLandsError(\"in 'get_all_position'\")\r\n namee = []\r\n for x in self.land_list():\r\n namee.append(x.name)\r\n return namee\r\n # 获得所有景点位置\r\n\r\n def lst_pos(self, land):\r\n return self.land_list.index(land)\r\n\r\n def _get_position(self):\r\n if self.is_empty():\r\n raise WebLandsError(\"in 'get_all_position'\")\r\n positionn = []\r\n for x in self.land_list():\r\n positionn.append(x.position)\r\n return positionn\r\n\r\n def add_land(self, landscape):\r\n self.land_list.append(landscape)\r\n self.graph_money.add_vertex()\r\n self.graph_time.add_vertex()\r\n self.graph_line.add_vertex()\r\n self.lnum += 1\r\n\r\n # 如果不设置money,time或line,自然landscape之间没有边相连\r\n\r\n def set_all(self, land1, land2, money=infnum, time=infnum, line=1):\r\n graph_money.add_edge(self.land_list().index(land1),\r\n self.land_list().index(land2), money)\r\n graph_time.add_edge(self.land_list().index(land1),\r\n self.land_list().index(land2), time)\r\n graph_line.add_edge(self.land_list().index(land1),\r\n self.land_list().index(land2), line)\r\n\r\n# 以下基于Dijkstra算法来搞定最短路径问题,可同时作用于时间,金钱和路径长度做邻接图\r\n def set_money(self, land1, land2, money):\r\n self.graph_money.add_edge(self.land_list.index(land1),\r\n self.land_list.index(land2), money)\r\n\r\n def get_money(self, land1, land2):\r\n a = self.graph_money.get_edge(self.land_list.index(land1),\r\n self.land_list.index(land2))\r\n return a\r\n\r\n def set_time(self, land1, land2, time):\r\n self.graph_money.add_edge(self.land_list.index(land1),\r\n self.land_list.index(land2), time)\r\n\r\n def get_time(self, land1, land2):\r\n a = self.graph_time.get_edge(self.land_list.index(land1),\r\n self.land_list.index(land2))\r\n return a\r\n\r\n def set_line(self, land1, land2, line):\r\n self.graph_line.add_edge(self.land_list.index(land1),\r\n self.land_list.index(land2), line)\r\n\r\n def get_line(self, land1, land2):\r\n a = 
self.graph_line.get_edge(self.land_list.index(land1),\r\n self.land_list.index(land2))\r\n return a\r\n\r\n# shortestmoney等开始\r\n\r\n\r\ndef shortest_money(web, land1, land2):\r\n vi = web.lst_pos(land1)\r\n vj = web.lst_pos(land2)\r\n if vi == vj:\r\n raise ValuError(\"in shortest_money,\\\r\n if the begining is the same as the ending, you don't have to pay anything\")\r\n path = dijkstra_shortest_paths(web.graph_money, vi)\r\n path_list = [vi]\r\n while vi != path[vj][0]:\r\n path_list.append(path[vj][0])\r\n vi = path[vj][0]\r\n return path_list, path[vj][1]\r\n\r\n\r\ndef shortest_money_str(web, land1, land2):\r\n str_ = \"\"\r\n path, pay = shortest_money(web, land1, land2)\r\n for i in range(len(path)):\r\n str_ += str(web.land_list[path[i]].name)\r\n str_ += \"->\"\r\n str_ += land2.name\r\n return \"所求的最短路money路径为\", str_, \"总money代价为\", pay\r\n\r\n\r\ndef shortest_time(web, land1, land2):\r\n vi = web.lst_pos(land1)\r\n vj = web.lst_pos(land2)\r\n if vi == vj:\r\n raise ValuError(\"in shortest_time,\\\r\n if the begining is the same as the ending, you don't have to pay anything\")\r\n path = dijkstra_shortest_paths(web.graph_time(), vi)\r\n path_list = [vi]\r\n while vi != vj:\r\n path_list.append(path[vj][0])\r\n vi = path[vj][0]\r\n return path_list, path[vj][1]\r\n\r\n\r\ndef shortest_time_str(web, land1, land2):\r\n str_ = \"\"\r\n path, pay = shortest_time(web, land1, land2)\r\n for i in range(len(path)):\r\n str_ += str(path[i])\r\n return \"所求的最短路time路径为\", str_, \"总time代价为\", pay\r\n\r\n\r\ndef shortest_line(web, land1, land2):\r\n vi = web.lst_pos(land1)\r\n vj = web.lst_pos(land2)\r\n if vi == vj:\r\n raise ValuError(\"in shortest_line,\\\r\n if the begining is the same as the ending, you don't have to pay anything\")\r\n path = dijkstra_shortest_paths(web.graph_line(), vi)\r\n path_list = [vi]\r\n while vi != vj:\r\n path_list.append(path[vj][0])\r\n vi = path[vj][0]\r\n return path_list, path[vj][1]\r\n\r\n\r\ndef shortest_time_str(web, land1, land2):\r\n str_ = \"\"\r\n path, pay = shortest_line(web, land1, land2)\r\n for i in range(len(path)):\r\n str_ += str(path[i])\r\n return \"所求的最短路line路径为\", str_, \"总line代价为\", pay\r\n# shortest等结束\r\n\r\n\r\nclass landscape: # landscape代表一个景点,rank表示在图中list的位置\r\n\r\n def __init__(self, name, position, category=None, hot=0): # 其中position是一个数,代表一个景点\r\n self.name = name\r\n self.position = position\r\n self.category = category\r\n self.hot = hot\r\n\r\n def position(self):\r\n return self._position\r\n\r\n def category(self):\r\n return self._category\r\n\r\n def name(self):\r\n return self._name\r\n\r\n def hot(self):\r\n return hot\r\n\r\n def set_category(self, sorts):\r\n if sorts not in categorys:\r\n raise ValuError(\"in set_category, we do not have {}\".format(sorts))\r\n self.category = sorts\r\n\r\n# 对于多目标问题,先用既有方法构造一个web,web保存了所有目标landscape\r\n# 现在基于Prim算法给出一个关于多目标问题的算法,其实就是最小生成树问题\r\n\r\n\r\ndef muti_aim_solve(land_list):\r\n sub_web = web()\r\n for x in land_list:\r\n sub_web.add_land(x)\r\n lanst = web.land_list().copy()\r\n for x in lanst:\r\n for y in lanst:\r\n if x == y:\r\n continue\r\n vi = lst_pos(web, x)\r\n vj = lst_pos(web, y)\r\n\r\na, b, c = Edges([0, 2, 4])\r\nlst = [\"东方明珠\", \"西湖\", \"迪士尼\"]\r\nchina = web(3, lst, a, b, c)\r\n",
"step-ids": [
15,
21,
24,
29,
32
]
}
|
[
15,
21,
24,
29,
32
] |
# A program to display and find the sum of a list of numbers using a for loop
numbers = [10, 20, 30, 40, 50]
total = 0
for i in numbers:
    print(i)
    total = total + i
print('sum =', total)
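
# Equivalent check using the built-in sum(), possible because the script above
# no longer shadows that name:
print('sum =', sum(numbers))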
|
normal
|
{
"blob_id": "88e34ee5cd5af7d3b04321c4aa4fc815f926add1",
"index": 7110,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in list:\n print(i)\n sum = sum + i\nprint('sum =', sum)\n",
"step-3": "list = [10, 20, 30, 40, 50]\nsum = 0\nfor i in list:\n print(i)\n sum = sum + i\nprint('sum =', sum)\n",
"step-4": "# A program to display and find the sum of a list of numbers using for loop\r\n\r\nlist=[10,20,30,40,50]\r\nsum=0;\r\n\r\nfor i in list:\r\n\tprint(i)\r\n\tsum=sum+i\r\nprint('sum =',sum)\t",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import unittest
from appium import webdriver
from time import sleep
import os
from PublicResour import Desired_Capabilities

"""
While logged in, check every feature module on the "My" screen.
Most of the test cases run from the "My" screen.
"""

# Return the abs path relative to this file, not the cwd
PATH = lambda p: os.path.abspath(
    os.path.join(os.path.dirname(__file__), p)
)


class My(unittest.TestCase):
    def setUp(self):
        desired_caps = Desired_Capabilities.startdevices()
        self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
        print u'Device configured successfully'
        sleep(5)

    def test_myFavorite(self):
        print u'Entered the home page ----'
        make_comic = self.driver.find_elements_by_class_name("android.view.View")
        make_comic[0].click()
        sleep(5)
        favorite_comic = self.driver.find_element_by_id("com.manboker.headportrait:id/comic_praise_iv")
        if favorite_comic.is_selected():
            pass
        else:
            favorite_comic.click()
            sleep(1)
        print u'Comic added to favorites'

        main_entry = self.driver.find_element_by_id("com.manboker.headportrait:id/comics_main_top_view_to_entry_iv")
        main_entry.click()
        sleep(1)
        print u'Back on the main screen'

        head_icon = self.driver.find_element_by_id("com.manboker.headportrait:id/entry_album_set_icon")
        head_icon.click()
        sleep(1)

        select_myfavorite = self.driver.find_element_by_id("com.manboker.headportrait:id/set_favorite_tv")
        select_myfavorite.click()
        sleep(3)
        print u'Entered My Favorites'

        edit_favorite = self.driver.find_element_by_id("com.manboker.headportrait:id/edit_iv")
        edit_favorite.click()
        sleep(1)
        item_comic_favorite = self.driver.find_element_by_id("com.manboker.headportrait:id/item_layout_0_iv")
        item_comic_favorite.click()
        sleep(1)
        delete_comic_favorite = self.driver.find_element_by_id("com.manboker.headportrait:id/delete_tv")
        delete_comic_favorite.click()
        sleep(1)
        confirm_delete = self.driver.find_element_by_id("android:id/button1")
        confirm_delete.click()
        sleep(1)
        print u'Comic deleted; the emoji module is mid-rework, so it is skipped for now'
        back_my = self.driver.find_element_by_id("com.manboker.headportrait:id/iv_back")
        back_my.click()
        sleep(2)

        self.driver.find_element_by_id("com.manboker.headportrait:id/set_set_goback").click()
        sleep(2)

    def test_aboutMe(self):
        head_icon = self.driver.find_element_by_id("com.manboker.headportrait:id/entry_album_set_icon")
        head_icon.click()
        sleep(1)
        print u'Entered my profile space'
        select_aboutme = self.driver.find_element_by_name("我的空间")  # "My Space"
        select_aboutme.click()
        sleep(3)
        user_headicon = self.driver.find_element_by_id("com.manboker.headportrait:id/specific_user_headicon")
        user_headicon.click()
        sleep(3)
        self.driver.get_screenshot_as_file(r'C:\Pycharm\Manboker\MainMy\Screenshot\userhead' + '.jpg')
        sleep(1)
        self.driver.find_element_by_id("com.manboker.headportrait:id/community_comment_adjust_imageview").click()
        sleep(1)
        self.driver.swipe(1000, 600, 1000, 900, 1000)
        sleep(1)
        self.driver.get_screenshot_as_file(r'C:\Pycharm\Manboker\MainMy\Screenshot\AboutMe' + '.jpg')
        print u'----- Profile space check finished -----'
        go_backmy = self.driver.find_element_by_id("com.manboker.headportrait:id/topic_specific_user_goback")
        go_backmy.click()
        sleep(2)

        self.driver.find_element_by_id("com.manboker.headportrait:id/set_set_goback").click()
        sleep(2)

    def test_myFollowing(self):
        head_icon = self.driver.find_element_by_id("com.manboker.headportrait:id/entry_album_set_icon")
        head_icon.click()
        sleep(1)
        print u'Entered My Following'
        select_myfollowing = self.driver.find_element_by_name("我的关注")  # "My Following"
        select_myfollowing.click()
        sleep(2)
        # Add a follow
        add_following = self.driver.find_element_by_id("com.manboker.headportrait:id/t_fans_image")
        add_following.click()
        sleep(2)
        # After refreshing, follow a friend again and then unfollow
        self.driver.swipe(1000, 600, 1000, 900, 1000)
        sleep(3)
        add_following.click()
        sleep(2)
        cancel_following = add_following
        cancel_following.click()
        sleep(1)
        find_follows = self.driver.find_element_by_id("com.manboker.headportrait:id/t_follows_find")
        find_follows.click()
        sleep(2)
        # Shuffle the suggested friends
        refresh_friends = self.driver.find_element_by_name("换一换")  # "Shuffle"
        refresh_friends.click()
        sleep(2)
        add_follow = self.driver.find_element_by_id("com.manboker.headportrait:id/add_follow")
        add_follow.click()
        # Back to the My screen
        self.driver.find_element_by_id("com.manboker.headportrait:id/t_find_back").click()
        sleep(2)
        go_backmy = self.driver.find_element_by_id("com.manboker.headportrait:id/t_follows_back")
        go_backmy.click()
        sleep(2)
        self.driver.find_element_by_id("com.manboker.headportrait:id/set_set_goback").click()
        sleep(2)

    def test_Followers(self):
        head_icon = self.driver.find_element_by_id("com.manboker.headportrait:id/entry_album_set_icon")
        head_icon.click()
        sleep(1)
        print u'Entered My Followers'
        select_followers = self.driver.find_element_by_name("我的粉丝")  # "My Followers"
        select_followers.click()
        sleep(2)
        self.driver.swipe(1000, 600, 1000, 900, 1000)
        sleep(2)
        self.driver.swipe(1000, 900, 1000, 600, 1000)
        sleep(2)
        go_backmy = self.driver.find_element_by_id("com.manboker.headportrait:id/topic_paise_list_goback")
        go_backmy.click()
        self.driver.find_element_by_id("com.manboker.headportrait:id/set_set_goback").click()
        sleep(2)


if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(My)
    unittest.TextTestRunner(verbosity=2).run(suite)
    # unittest.main()
|
normal
|
{
"blob_id": "883a50cf380b08c479c30edad3a2b61a6f3075cc",
"index": 4030,
"step-1": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\nimport unittest\nfrom selenium import webdriver\nfrom appium import webdriver\nfrom time import sleep\nimport os\nfrom PublicResour import Desired_Capabilities\n\n\"\"\"\n 登录状态下检查“我的”界面的所有的功能模块\n 大部分执行用例时在“我的”界面\n\"\"\"\n\n#Return ads path relative to this file not cwd\nPATH = lambda p: os.path.abspath(\n os.path.join(os.path.dirname(__file__), p)\n)\n\nclass My(unittest.TestCase):\n def setUp(self):\n desired_caps = Desired_Capabilities.startdevices()\n self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)\n print u'设备配置成功'\n sleep(5)\n\n def test_myFavorite(self):\n print u'进入首页了----'\n make_commic = self.driver.find_elements_by_class_name(\"android.view.View\")\n make_commic[0].click()\n sleep(5)\n favorite_comic = self.driver.find_element_by_id(\"com.manboker.headportrait:id/comic_praise_iv\")\n if (favorite_comic.is_selected() == True):\n pass\n else:\n favorite_comic.click()\n sleep(1)\n print u'漫画已收藏'\n\n main_entry = self.driver.find_element_by_id(\"com.manboker.headportrait:id/comics_main_top_view_to_entry_iv\")\n main_entry.click()\n sleep(1)\n print u'返回到主界面'\n\n head_icon = self.driver.find_element_by_id(\"com.manboker.headportrait:id/entry_album_set_icon\")\n head_icon.click()\n sleep(1)\n\n select_myfavorite = self.driver.find_element_by_id(\"com.manboker.headportrait:id/set_favorite_tv\")\n select_myfavorite.click()\n sleep(3)\n print u'进入我的收藏'\n\n edit_favorite = self.driver.find_element_by_id(\"com.manboker.headportrait:id/edit_iv\")\n edit_favorite.click()\n sleep(1)\n item_comic_favorite= self.driver.find_element_by_id(\"com.manboker.headportrait:id/item_layout_0_iv\")\n item_comic_favorite.click()\n sleep(1)\n delete_comic_favorite = self.driver.find_element_by_id(\"com.manboker.headportrait:id/delete_tv\")\n delete_comic_favorite.click()\n sleep(1)\n confirm_delete = self.driver.find_element_by_id(\"android:id/button1\")\n confirm_delete.click()\n sleep(1)\n print u'你已经把漫画删除了, 表情改版没做好暂时不过表情模块'\n back_my = self.driver.find_element_by_id(\"com.manboker.headportrait:id/iv_back\")\n back_my.click()\n sleep(2)\n\n self.driver.find_element_by_id(\"com.manboker.headportrait:id/set_set_goback\").click()\n sleep(2)\n\n def test_aboutMe(self):\n\n head_icon = self.driver.find_element_by_id(\"com.manboker.headportrait:id/entry_album_set_icon\")\n head_icon.click()\n sleep(1)\n print u'进入个人空间'\n select_aboutme = self.driver.find_element_by_name(\"我的空间\")\n select_aboutme.click()\n sleep(3)\n user_headicon = self.driver.find_element_by_id(\"com.manboker.headportrait:id/specific_user_headicon\")\n user_headicon.click()\n sleep(3)\n self.driver.get_screenshot_as_file('C:\\Pycharm\\Manboker\\MainMy\\Screenshot\\userhead' + '.jpg')\n sleep(1)\n self.driver.find_element_by_id(\"com.manboker.headportrait:id/community_comment_adjust_imageview\").click()\n sleep(1)\n self.driver.swipe(1000,600,1000,900,1000)\n sleep(1)\n self.driver.get_screenshot_as_file('C:\\Pycharm\\Manboker\\MainMy\\Screenshot\\AboutMe' + '.jpg')\n print u'-----个人空间检查完毕-----'\n go_backmy = self.driver.find_element_by_id(\"com.manboker.headportrait:id/topic_specific_user_goback\")\n go_backmy.click()\n sleep(2)\n\n self.driver.find_element_by_id(\"com.manboker.headportrait:id/set_set_goback\").click()\n sleep(2)\n\n def test_myFollowing(self):\n\n head_icon = self.driver.find_element_by_id(\"com.manboker.headportrait:id/entry_album_set_icon\")\n head_icon.click()\n sleep(1)\n print u'进入我的关注'\n select_myfollowing = 
self.driver.find_element_by_name(\"我的关注\")\n select_myfollowing.click()\n sleep(2)\n #添加关注\n add_following = self.driver.find_element_by_id(\"com.manboker.headportrait:id/t_fans_image\")\n add_following.click()\n sleep(2)\n #刷新后再次关注好友和取消关注\n self.driver.swipe(1000, 600, 1000, 900, 1000)\n sleep(3)\n add_following.click()\n sleep(2)\n cancel_following = add_following\n cancel_following.click()\n sleep(1)\n\n find_follows = self.driver.find_element_by_id(\"com.manboker.headportrait:id/t_follows_find\")\n find_follows.click()\n sleep(2)\n #换一换\n refresh_friends = self.driver.find_element_by_name(\"换一换\")\n refresh_friends.click()\n sleep(2)\n\n add_follow = self.driver.find_element_by_id(\"com.manboker.headportrait:id/add_follow\")\n add_follow.click()\n #返回到我的界面\n self.driver.find_element_by_id(\"com.manboker.headportrait:id/t_find_back\").click()\n sleep(2)\n go_backmy = self.driver.find_element_by_id(\"com.manboker.headportrait:id/t_follows_back\")\n go_backmy.click()\n sleep(2)\n\n self.driver.find_element_by_id(\"com.manboker.headportrait:id/set_set_goback\").click()\n sleep(2)\n\n def test_Followers(self):\n head_icon = self.driver.find_element_by_id(\"com.manboker.headportrait:id/entry_album_set_icon\")\n head_icon.click()\n sleep(1)\n\n print u'进入我的粉丝'\n select_followers = self.driver.find_element_by_name(\"我的粉丝\")\n select_followers.click()\n sleep(2)\n\n self.driver.swipe(1000, 600, 1000, 900, 1000)\n sleep(2)\n self.driver.swipe(1000, 900, 1000, 600, 1000)\n sleep(2)\n\n go_backmy = self.driver.find_element_by_id(\"com.manboker.headportrait:id/topic_paise_list_goback\")\n go_backmy.click()\n\n self.driver.find_element_by_id(\"com.manboker.headportrait:id/set_set_goback\").click()\n sleep(2)\n\nif __name__ == \"__main__\":\n suite = unittest.TestLoader().loadTestsFromTestCase(My)\n unittest.TextTestRunner(verbosity=2).run(suite)\n# unittest.main()\n\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from tkinter import *
# Everything in tkinter is a widget
# We start with the Root Widget
root = Tk()
# Creating a Label Widget
myLabel1 = Label(root, text="Hello User!")
myLabel2 = Label(root, text="Welcome to medBOT")
# Put labels onto the screen
myLabel1.grid(row=0, column=0)
myLabel2.grid(row=1, column=0)
# grid() pins each widget to an exact row/column cell
# Rows and columns position the widgets relative to one another
root.mainloop()
|
normal
|
{
"blob_id": "93fe16e5a97ec2652c4f6b8be844244d9776ea2e",
"index": 4921,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nmyLabel1.grid(row=0, column=0)\nmyLabel2.grid(row=1, column=0)\nroot.mainloop()\n",
"step-3": "<mask token>\nroot = Tk()\nmyLabel1 = Label(root, text='Hello User!')\nmyLabel2 = Label(root, text='Welcome to medBOT')\nmyLabel1.grid(row=0, column=0)\nmyLabel2.grid(row=1, column=0)\nroot.mainloop()\n",
"step-4": "from tkinter import *\nroot = Tk()\nmyLabel1 = Label(root, text='Hello User!')\nmyLabel2 = Label(root, text='Welcome to medBOT')\nmyLabel1.grid(row=0, column=0)\nmyLabel2.grid(row=1, column=0)\nroot.mainloop()\n",
"step-5": "from tkinter import *\n\n# Everything in tkinter is a widget\n# We start with the Root Widget\n\nroot = Tk()\n# Creating a Label Widget\nmyLabel1 = Label(root, text=\"Hello User!\")\nmyLabel2 = Label(root, text=\"Welcome to medBOT\")\n\n# Put labels onto the screen\nmyLabel1.grid(row=0, column=0)\nmyLabel2.grid(row=1, column=0)\n# Grid assigns the texts exacts in the position\n# Grid creates a relative position\n\nroot.mainloop()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
A = input("Please enter a value.\n")  # the input value is stored in A
                                      # \n inserts a line break in the prompt
print(A.upper() + " World!")          # print A in uppercase, followed by " World!"
                                      # upper() converts the preceding string to uppercase
|
normal
|
{
"blob_id": "8a54a71b08d10c5da9ca440e8e4f61f908e00d54",
"index": 9496,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(A.upper() + ' World!')\n",
"step-3": "A = input('입력해주세요.\\n')\nprint(A.upper() + ' World!')\n",
"step-4": "A = input(\"입력해주세요.\\n\") #입력값을 in_AAA로 칭한다\r\n #\\n은 문법의 줄바꾸기\r\n\r\nprint(A.upper()+\" World!\") #in_AAA를 출력 + \"World!\")\r\n #upper()는 앞의 값을 대문자화+\"\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.db import models
from django.contrib.auth.models import AbstractUser, BaseUserManager
class UserManager(BaseUserManager):
    # Needed so that username is not a required field
def create_user(self, email, password, **kwargs):
user = self.model(email=email, **kwargs)
user.set_password(password)
user.save()
return user
def create_superuser(self, email, password, **kwargs):
user = self.model(email=email, is_staff=True, is_superuser=True, **kwargs)
user.set_password(password)
user.save()
return user
class Utilizator(AbstractUser):
""" Tabel info utilizator
nume - extras automat din email ([nume]@gmail.com)
email - se va loga cu emailul
parola - ***
descriere - informatiile despre utilizator scrise de acesta pentru celilati potential colegi de apartament
ocupatie - tipul jobului
sex - mf
varsta -
buget -
imagine_profil - imagine profil
cont_admin - are access la backend, administratorul poate gestiona utilizatorii si anunturile
cont_premium: regular: cont gratis poate avea activ doar un anunt,
premium: cont platit poate avea activ unul sau mai multe anunturi,
poate vedea statistici cu privire la anunturile postate
primeste prin email atunci cand un anunt a fost postat
Un utilizator poate avea unul sau mai multe anunturi postate si/sau unul sau mai multe anunturi salvate la favorite
"""
email = models.EmailField(unique=True)
descriere = models.CharField(max_length=255, blank=True)
ocupatie = models.CharField(max_length=50, blank=True, default="nespecificat")
nume = models.CharField(max_length=50, blank=True)
sex = models.CharField(max_length=1, blank=True, default="N")
varsta = models.PositiveIntegerField(blank=True, null=True)
buget = models.PositiveIntegerField(blank=False, null=True)
telefon = models.CharField(max_length=20, blank=True, default="nespecificat")
imagine_profil = models.ImageField(blank=True, upload_to="utilizatori/", default="utilizatori/imagine_profil.svg")
cont_premium = models.BooleanField(default=False)
token = models.CharField(max_length=1, blank=True)
    # Remove these fields/columns
first_name = None
last_name = None
    # Needed to replace username with email as the login field
username = None
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = []
objects = UserManager()
def __str__(self):
return f"{self.email}"
class Meta:
verbose_name_plural = "Utilizatori"
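
# A minimal usage sketch (illustrative only, not part of this module; the
# email and password values below are hypothetical placeholders):
#
#   user = Utilizator.objects.create_user(email="ana@example.com", password="parola123")
#   admin = Utilizator.objects.create_superuser(email="admin@example.com", password="parola123")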
|
normal
|
{
"blob_id": "85b8ffe1bca879acd86251e4662b33648b713588",
"index": 7243,
"step-1": "<mask token>\n\n\nclass Utilizator(AbstractUser):\n \"\"\" Tabel info utilizator \n nume - extras automat din email ([nume]@gmail.com)\n email - se va loga cu emailul\n parola - *** \n descriere - informatiile despre utilizator scrise de acesta pentru celilati potential colegi de apartament\n ocupatie - tipul jobului\n sex - mf\n varsta - \n buget - \n imagine_profil - imagine profil\n cont_admin - are access la backend, administratorul poate gestiona utilizatorii si anunturile\n cont_premium: regular: cont gratis poate avea activ doar un anunt, \n premium: cont platit poate avea activ unul sau mai multe anunturi, \n poate vedea statistici cu privire la anunturile postate\n primeste prin email atunci cand un anunt a fost postat\n Un utilizator poate avea unul sau mai multe anunturi postate si/sau unul sau mai multe anunturi salvate la favorite\n \"\"\"\n email = models.EmailField(unique=True)\n descriere = models.CharField(max_length=255, blank=True)\n ocupatie = models.CharField(max_length=50, blank=True, default=\n 'nespecificat')\n nume = models.CharField(max_length=50, blank=True)\n sex = models.CharField(max_length=1, blank=True, default='N')\n varsta = models.PositiveIntegerField(blank=True, null=True)\n buget = models.PositiveIntegerField(blank=False, null=True)\n telefon = models.CharField(max_length=20, blank=True, default=\n 'nespecificat')\n imagine_profil = models.ImageField(blank=True, upload_to='utilizatori/',\n default='utilizatori/imagine_profil.svg')\n cont_premium = models.BooleanField(default=False)\n token = models.CharField(max_length=1, blank=True)\n first_name = None\n last_name = None\n username = None\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = []\n objects = UserManager()\n\n def __str__(self):\n return f'{self.email}'\n\n\n class Meta:\n verbose_name_plural = 'Utilizatori'\n",
"step-2": "<mask token>\n\n\nclass UserManager(BaseUserManager):\n <mask token>\n <mask token>\n\n\nclass Utilizator(AbstractUser):\n \"\"\" Tabel info utilizator \n nume - extras automat din email ([nume]@gmail.com)\n email - se va loga cu emailul\n parola - *** \n descriere - informatiile despre utilizator scrise de acesta pentru celilati potential colegi de apartament\n ocupatie - tipul jobului\n sex - mf\n varsta - \n buget - \n imagine_profil - imagine profil\n cont_admin - are access la backend, administratorul poate gestiona utilizatorii si anunturile\n cont_premium: regular: cont gratis poate avea activ doar un anunt, \n premium: cont platit poate avea activ unul sau mai multe anunturi, \n poate vedea statistici cu privire la anunturile postate\n primeste prin email atunci cand un anunt a fost postat\n Un utilizator poate avea unul sau mai multe anunturi postate si/sau unul sau mai multe anunturi salvate la favorite\n \"\"\"\n email = models.EmailField(unique=True)\n descriere = models.CharField(max_length=255, blank=True)\n ocupatie = models.CharField(max_length=50, blank=True, default=\n 'nespecificat')\n nume = models.CharField(max_length=50, blank=True)\n sex = models.CharField(max_length=1, blank=True, default='N')\n varsta = models.PositiveIntegerField(blank=True, null=True)\n buget = models.PositiveIntegerField(blank=False, null=True)\n telefon = models.CharField(max_length=20, blank=True, default=\n 'nespecificat')\n imagine_profil = models.ImageField(blank=True, upload_to='utilizatori/',\n default='utilizatori/imagine_profil.svg')\n cont_premium = models.BooleanField(default=False)\n token = models.CharField(max_length=1, blank=True)\n first_name = None\n last_name = None\n username = None\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = []\n objects = UserManager()\n\n def __str__(self):\n return f'{self.email}'\n\n\n class Meta:\n verbose_name_plural = 'Utilizatori'\n",
"step-3": "<mask token>\n\n\nclass UserManager(BaseUserManager):\n\n def create_user(self, email, password, **kwargs):\n user = self.model(email=email, **kwargs)\n user.set_password(password)\n user.save()\n return user\n <mask token>\n\n\nclass Utilizator(AbstractUser):\n \"\"\" Tabel info utilizator \n nume - extras automat din email ([nume]@gmail.com)\n email - se va loga cu emailul\n parola - *** \n descriere - informatiile despre utilizator scrise de acesta pentru celilati potential colegi de apartament\n ocupatie - tipul jobului\n sex - mf\n varsta - \n buget - \n imagine_profil - imagine profil\n cont_admin - are access la backend, administratorul poate gestiona utilizatorii si anunturile\n cont_premium: regular: cont gratis poate avea activ doar un anunt, \n premium: cont platit poate avea activ unul sau mai multe anunturi, \n poate vedea statistici cu privire la anunturile postate\n primeste prin email atunci cand un anunt a fost postat\n Un utilizator poate avea unul sau mai multe anunturi postate si/sau unul sau mai multe anunturi salvate la favorite\n \"\"\"\n email = models.EmailField(unique=True)\n descriere = models.CharField(max_length=255, blank=True)\n ocupatie = models.CharField(max_length=50, blank=True, default=\n 'nespecificat')\n nume = models.CharField(max_length=50, blank=True)\n sex = models.CharField(max_length=1, blank=True, default='N')\n varsta = models.PositiveIntegerField(blank=True, null=True)\n buget = models.PositiveIntegerField(blank=False, null=True)\n telefon = models.CharField(max_length=20, blank=True, default=\n 'nespecificat')\n imagine_profil = models.ImageField(blank=True, upload_to='utilizatori/',\n default='utilizatori/imagine_profil.svg')\n cont_premium = models.BooleanField(default=False)\n token = models.CharField(max_length=1, blank=True)\n first_name = None\n last_name = None\n username = None\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = []\n objects = UserManager()\n\n def __str__(self):\n return f'{self.email}'\n\n\n class Meta:\n verbose_name_plural = 'Utilizatori'\n",
"step-4": "<mask token>\n\n\nclass UserManager(BaseUserManager):\n\n def create_user(self, email, password, **kwargs):\n user = self.model(email=email, **kwargs)\n user.set_password(password)\n user.save()\n return user\n\n def create_superuser(self, email, password, **kwargs):\n user = self.model(email=email, is_staff=True, is_superuser=True, **\n kwargs)\n user.set_password(password)\n user.save()\n return user\n\n\nclass Utilizator(AbstractUser):\n \"\"\" Tabel info utilizator \n nume - extras automat din email ([nume]@gmail.com)\n email - se va loga cu emailul\n parola - *** \n descriere - informatiile despre utilizator scrise de acesta pentru celilati potential colegi de apartament\n ocupatie - tipul jobului\n sex - mf\n varsta - \n buget - \n imagine_profil - imagine profil\n cont_admin - are access la backend, administratorul poate gestiona utilizatorii si anunturile\n cont_premium: regular: cont gratis poate avea activ doar un anunt, \n premium: cont platit poate avea activ unul sau mai multe anunturi, \n poate vedea statistici cu privire la anunturile postate\n primeste prin email atunci cand un anunt a fost postat\n Un utilizator poate avea unul sau mai multe anunturi postate si/sau unul sau mai multe anunturi salvate la favorite\n \"\"\"\n email = models.EmailField(unique=True)\n descriere = models.CharField(max_length=255, blank=True)\n ocupatie = models.CharField(max_length=50, blank=True, default=\n 'nespecificat')\n nume = models.CharField(max_length=50, blank=True)\n sex = models.CharField(max_length=1, blank=True, default='N')\n varsta = models.PositiveIntegerField(blank=True, null=True)\n buget = models.PositiveIntegerField(blank=False, null=True)\n telefon = models.CharField(max_length=20, blank=True, default=\n 'nespecificat')\n imagine_profil = models.ImageField(blank=True, upload_to='utilizatori/',\n default='utilizatori/imagine_profil.svg')\n cont_premium = models.BooleanField(default=False)\n token = models.CharField(max_length=1, blank=True)\n first_name = None\n last_name = None\n username = None\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = []\n objects = UserManager()\n\n def __str__(self):\n return f'{self.email}'\n\n\n class Meta:\n verbose_name_plural = 'Utilizatori'\n",
"step-5": "from django.db import models\n\nfrom django.contrib.auth.models import AbstractUser, BaseUserManager\n\n\nclass UserManager(BaseUserManager):\n #Necesar pentru a scoate username de la required\n\n def create_user(self, email, password, **kwargs):\n user = self.model(email=email, **kwargs)\n user.set_password(password)\n user.save()\n return user\n\n def create_superuser(self, email, password, **kwargs):\n user = self.model(email=email, is_staff=True, is_superuser=True, **kwargs)\n user.set_password(password)\n user.save()\n return user\n\n\nclass Utilizator(AbstractUser):\n \"\"\" Tabel info utilizator \n nume - extras automat din email ([nume]@gmail.com)\n email - se va loga cu emailul\n parola - *** \n descriere - informatiile despre utilizator scrise de acesta pentru celilati potential colegi de apartament\n ocupatie - tipul jobului\n sex - mf\n varsta - \n buget - \n imagine_profil - imagine profil\n cont_admin - are access la backend, administratorul poate gestiona utilizatorii si anunturile\n cont_premium: regular: cont gratis poate avea activ doar un anunt, \n premium: cont platit poate avea activ unul sau mai multe anunturi, \n poate vedea statistici cu privire la anunturile postate\n primeste prin email atunci cand un anunt a fost postat\n Un utilizator poate avea unul sau mai multe anunturi postate si/sau unul sau mai multe anunturi salvate la favorite\n \"\"\"\n email = models.EmailField(unique=True)\n descriere = models.CharField(max_length=255, blank=True)\n ocupatie = models.CharField(max_length=50, blank=True, default=\"nespecificat\")\n nume = models.CharField(max_length=50, blank=True)\n sex = models.CharField(max_length=1, blank=True, default=\"N\")\n varsta = models.PositiveIntegerField(blank=True, null=True)\n buget = models.PositiveIntegerField(blank=False, null=True)\n telefon = models.CharField(max_length=20, blank=True, default=\"nespecificat\")\n imagine_profil = models.ImageField(blank=True, upload_to=\"utilizatori/\", default=\"utilizatori/imagine_profil.svg\")\n cont_premium = models.BooleanField(default=False)\n \n token = models.CharField(max_length=1, blank=True)\n \n #Scoatem field/coloanele \n first_name = None\n last_name = None\n\n #Necesare pentru a inlocui username cu email\n username = None\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = []\n objects = UserManager()\n \n def __str__(self):\n return f\"{self.email}\"\n\n class Meta:\n verbose_name_plural = \"Utilizatori\"\n\n ",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
"""
==============================
Visualize Cylinder with Wrench
==============================
We apply a constant body-fixed wrench to a cylinder and integrate
acceleration to twist and exponential coordinates of transformation
to finally compute the new pose of the cylinder.
"""
import numpy as np
from pytransform3d.transformations import (
transform_from_exponential_coordinates)
import pytransform3d.visualizer as pv
def spatial_inertia_of_cylinder(mass, length, radius):
I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2
I_zz = 0.5 * mass * radius ** 2
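    # The 6x6 spatial inertia is block-diagonal: the rotational inertia fills
    # the top-left 3x3 block and mass * identity fills the bottom-right 3x3
    # block, matching the (angular, linear) twist ordering used in this example.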
inertia = np.eye(6)
inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])
inertia[3:, 3:] *= mass
return inertia
def animation_callback(
step, cylinder, cylinder_frame, prev_cylinder2world,
Stheta_dot, inertia_inv):
if step == 0: # Reset cylinder state
prev_cylinder2world[:, :] = np.eye(4)
Stheta_dot[:] = 0.0
# Apply constant wrench
wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])
dt = 0.0005
Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)
Stheta_dot += dt * Stheta_ddot
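    # Semi-implicit Euler integration: the twist is updated first, and the
    # updated twist is then used below to advance the pose.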
cylinder2world = transform_from_exponential_coordinates(
dt * Stheta_dot).dot(prev_cylinder2world)
# Update visualization
cylinder_frame.set_data(cylinder2world)
cylinder.set_data(cylinder2world)
prev_cylinder2world[:, :] = cylinder2world
return cylinder_frame, cylinder
fig = pv.figure()
# Definition of cylinder
mass = 1.0
length = 0.5
radius = 0.1
inertia_inv = np.linalg.inv(
spatial_inertia_of_cylinder(mass=mass, length=length, radius=radius))
# State of cylinder
cylinder2world = np.eye(4)
twist = np.zeros(6)
cylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])
cylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)
fig.plot_transform(A2B=np.eye(4), s=0.5)
fig.view_init()
if "__file__" in globals():
fig.animate(
animation_callback, n_frames=10000,
fargs=(cylinder, cylinder_frame, cylinder2world, twist, inertia_inv),
loop=True)
fig.show()
else:
fig.save_image("__open3d_rendered_image.jpg")
|
normal
|
{
"blob_id": "2019a2a5588e57164ff4226ef3bcbbc506f2b315",
"index": 7432,
"step-1": "<mask token>\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\n<mask token>\nfig.plot_transform(A2B=np.eye(4), s=0.5)\nfig.view_init()\nif '__file__' in globals():\n fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,\n cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)\n fig.show()\nelse:\n fig.save_image('__open3d_rendered_image.jpg')\n",
"step-3": "<mask token>\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\nfig = pv.figure()\nmass = 1.0\nlength = 0.5\nradius = 0.1\ninertia_inv = np.linalg.inv(spatial_inertia_of_cylinder(mass=mass, length=\n length, radius=radius))\ncylinder2world = np.eye(4)\ntwist = np.zeros(6)\ncylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])\ncylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)\nfig.plot_transform(A2B=np.eye(4), s=0.5)\nfig.view_init()\nif '__file__' in globals():\n fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,\n cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)\n fig.show()\nelse:\n fig.save_image('__open3d_rendered_image.jpg')\n",
"step-4": "<mask token>\nimport numpy as np\nfrom pytransform3d.transformations import transform_from_exponential_coordinates\nimport pytransform3d.visualizer as pv\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0:\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(dt * Stheta_dot\n ).dot(prev_cylinder2world)\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n prev_cylinder2world[:, :] = cylinder2world\n return cylinder_frame, cylinder\n\n\nfig = pv.figure()\nmass = 1.0\nlength = 0.5\nradius = 0.1\ninertia_inv = np.linalg.inv(spatial_inertia_of_cylinder(mass=mass, length=\n length, radius=radius))\ncylinder2world = np.eye(4)\ntwist = np.zeros(6)\ncylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])\ncylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)\nfig.plot_transform(A2B=np.eye(4), s=0.5)\nfig.view_init()\nif '__file__' in globals():\n fig.animate(animation_callback, n_frames=10000, fargs=(cylinder,\n cylinder_frame, cylinder2world, twist, inertia_inv), loop=True)\n fig.show()\nelse:\n fig.save_image('__open3d_rendered_image.jpg')\n",
"step-5": "\"\"\"\n==============================\nVisualize Cylinder with Wrench\n==============================\n\nWe apply a constant body-fixed wrench to a cylinder and integrate\nacceleration to twist and exponential coordinates of transformation\nto finally compute the new pose of the cylinder.\n\"\"\"\nimport numpy as np\nfrom pytransform3d.transformations import (\n transform_from_exponential_coordinates)\nimport pytransform3d.visualizer as pv\n\n\ndef spatial_inertia_of_cylinder(mass, length, radius):\n I_xx = I_yy = 0.25 * mass * radius ** 2 + 1.0 / 12.0 * mass * length ** 2\n I_zz = 0.5 * mass * radius ** 2\n inertia = np.eye(6)\n inertia[:3, :3] *= np.array([I_xx, I_yy, I_zz])\n inertia[3:, 3:] *= mass\n return inertia\n\n\ndef animation_callback(\n step, cylinder, cylinder_frame, prev_cylinder2world,\n Stheta_dot, inertia_inv):\n if step == 0: # Reset cylinder state\n prev_cylinder2world[:, :] = np.eye(4)\n Stheta_dot[:] = 0.0\n\n # Apply constant wrench\n wrench_in_cylinder = np.array([0.1, 0.001, 0.001, 0.01, 1.0, 1.0])\n dt = 0.0005\n\n Stheta_ddot = np.dot(inertia_inv, wrench_in_cylinder)\n Stheta_dot += dt * Stheta_ddot\n cylinder2world = transform_from_exponential_coordinates(\n dt * Stheta_dot).dot(prev_cylinder2world)\n\n # Update visualization\n cylinder_frame.set_data(cylinder2world)\n cylinder.set_data(cylinder2world)\n\n prev_cylinder2world[:, :] = cylinder2world\n\n return cylinder_frame, cylinder\n\n\nfig = pv.figure()\n\n# Definition of cylinder\nmass = 1.0\nlength = 0.5\nradius = 0.1\ninertia_inv = np.linalg.inv(\n spatial_inertia_of_cylinder(mass=mass, length=length, radius=radius))\n\n# State of cylinder\ncylinder2world = np.eye(4)\ntwist = np.zeros(6)\n\ncylinder = fig.plot_cylinder(length=length, radius=radius, c=[1, 0.5, 0])\ncylinder_frame = fig.plot_transform(A2B=cylinder2world, s=0.5)\n\nfig.plot_transform(A2B=np.eye(4), s=0.5)\n\nfig.view_init()\n\nif \"__file__\" in globals():\n fig.animate(\n animation_callback, n_frames=10000,\n fargs=(cylinder, cylinder_frame, cylinder2world, twist, inertia_inv),\n loop=True)\n fig.show()\nelse:\n fig.save_image(\"__open3d_rendered_image.jpg\")\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
#PortableKanban 4.3.6578.38136 - Encrypted Password Retrieval
# Requires the "des" package: python3 -m pip install des  (or: pip install des)

import base64
from des import DesKey
def decode(hash):
hash = base64.b64decode(hash.encode('utf-8'))
key = DesKey(b"7ly6UznJ")
return key.decrypt(hash,initial=b"XuVUm5fR",padding=True).decode('utf-8')
print(decode('XXXXXXXXXXXXXXXXXXXXXX'))
#change this to your encrypted key
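
# A round-trip sanity check (an assumption, not part of the original PoC):
# the des package's DesKey also provides encrypt() with the same initial and
# padding arguments, so encode() below should be inverted by decode() above.
def encode(plain):
    key = DesKey(b"7ly6UznJ")
    return base64.b64encode(key.encrypt(plain.encode('utf-8'), initial=b"XuVUm5fR", padding=True)).decode('utf-8')

#print(decode(encode('secret'))) #expected output: secret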
|
normal
|
{
"blob_id": "136215a3ba99f74160373181c458db9bec4bb6b7",
"index": 977,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef decode(hash):\n hash = base64.b64decode(hash.encode('utf-8'))\n key = DesKey(b'7ly6UznJ')\n return key.decrypt(hash, initial=b'XuVUm5fR', padding=True).decode('utf-8')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef decode(hash):\n hash = base64.b64decode(hash.encode('utf-8'))\n key = DesKey(b'7ly6UznJ')\n return key.decrypt(hash, initial=b'XuVUm5fR', padding=True).decode('utf-8')\n\n\nprint(decode('XXXXXXXXXXXXXXXXXXXXXX'))\n",
"step-4": "import json\nimport base64\nfrom des import *\nimport sys\n\n\ndef decode(hash):\n hash = base64.b64decode(hash.encode('utf-8'))\n key = DesKey(b'7ly6UznJ')\n return key.decrypt(hash, initial=b'XuVUm5fR', padding=True).decode('utf-8')\n\n\nprint(decode('XXXXXXXXXXXXXXXXXXXXXX'))\n",
"step-5": "#PortableKanban 4.3.6578.38136 - Encrypted Password Retrieval\r\n#Python3 -m pip install des\r\n#or\r\n#pip install des\r\n\r\nimport json\r\nimport base64\r\nfrom des import * #python3 -m pip install des, pip install des\r\nimport sys\r\n\r\ndef decode(hash):\r\n\thash = base64.b64decode(hash.encode('utf-8'))\r\n\tkey = DesKey(b\"7ly6UznJ\")\r\n\treturn key.decrypt(hash,initial=b\"XuVUm5fR\",padding=True).decode('utf-8')\r\n\r\nprint(decode('XXXXXXXXXXXXXXXXXXXXXX'))\r\n\r\n#change this to your encrypted key\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Import packages
import pandas
import requests
import lxml  # pandas.read_html parses with lxml under the hood
# Get page content
url = "https://archive.fantasysports.yahoo.com/nfl/2017/189499?lhst=sched#lhstsched"
html = requests.get(url).content
df_list = pandas.read_html(html)
# Pull relevant URLs
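
# Hedged sketch of the planned link extraction (read_html drops anchor tags,
# so the HTML is re-parsed here); the XPath and the 'fantasysports' filter
# are illustrative assumptions, not verified against the archived page.
import lxml.html  # the bare "import lxml" above does not expose lxml.html

tree = lxml.html.fromstring(html)
urls = [href for href in tree.xpath('//a/@href') if 'fantasysports' in href]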
|
normal
|
{
"blob_id": "d46035699bee1ad9a75ea251c2c3ab8817d6a740",
"index": 4343,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurl = (\n 'https://archive.fantasysports.yahoo.com/nfl/2017/189499?lhst=sched#lhstsched'\n )\nhtml = requests.get(url).content\ndf_list = pandas.read_html(html)\n",
"step-3": "import pandas\nimport requests\nimport lxml\nurl = (\n 'https://archive.fantasysports.yahoo.com/nfl/2017/189499?lhst=sched#lhstsched'\n )\nhtml = requests.get(url).content\ndf_list = pandas.read_html(html)\n",
"step-4": "# Import packages\nimport pandas\nimport requests\nimport lxml\n\n# Get page content\nurl = \"https://archive.fantasysports.yahoo.com/nfl/2017/189499?lhst=sched#lhstsched\"\nhtml = requests.get(url).content\ndf_list = pandas.read_html(html)\n\n# Pull relevant URLs\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
def testeum():
a = 10
print(id(a))
def testedois():
a = 10
print(id(a))
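
# Both functions print the same id: in CPython, small integers (-5 through
# 256) are cached, so every occurrence of the literal 10 is the same object.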
|
normal
|
{
"blob_id": "a2e2528f560f6117d4ceeb9cd20d3f6f6b2a30a7",
"index": 213,
"step-1": "<mask token>\n",
"step-2": "def testeum():\n a = 10\n print(id(a))\n\n\n<mask token>\n",
"step-3": "def testeum():\n a = 10\n print(id(a))\n\n\ndef testedois():\n a = 10\n print(id(a))\n",
"step-4": "# -*- coding: utf-8 -*-\ndef testeum():\n a = 10\n print(id(a))\ndef testedois():\n a = 10\n print(id(a))",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from numpy import *  # note: numpy is imported here but never used below

a = int(input('number: '))
b = '*'
c = 'o'
for i in range(a):
    # Row i has (a - i) stars on each edge and i 'o's doubled in the middle,
    # so every row is 2*a characters wide; a - (a - i) simplifies to i.
    d = (b * (a - i)) + (c * i) + (c * i) + (b * (a - i))
    print(d)
|
normal
|
{
"blob_id": "155b243ad7d93bcf2b74cd5b2bd3409ab7ec7473",
"index": 8488,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(a):\n d = b * (a - i) + c * (a - (a - i)) + c * (a - (a - i)) + b * (a - i)\n print(d)\n",
"step-3": "<mask token>\na = int(input('numero: '))\nb = '*'\nc = 'o'\nfor i in range(a):\n d = b * (a - i) + c * (a - (a - i)) + c * (a - (a - i)) + b * (a - i)\n print(d)\n",
"step-4": "from numpy import *\na = int(input('numero: '))\nb = '*'\nc = 'o'\nfor i in range(a):\n d = b * (a - i) + c * (a - (a - i)) + c * (a - (a - i)) + b * (a - i)\n print(d)\n",
"step-5": "from numpy import*\n\na=int(input('numero: '))\nb='*'\nc='o'\nfor i in range(a):\n\td=(b*(a-i))+(c*(a-(a-i)))+(c*(a-(a-i)))+(b*(a-i))\n\tprint(d)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pygame
import textwrap
import client.Button as Btn
from client.ClickableImage import ClickableImage as ClickImg
from client.CreateDisplay import CreateDisplay
import client.LiverpoolButtons as RuleSetsButtons_LP
import client.HandAndFootButtons as RuleSetsButtons_HF
import client.HandManagement as HandManagement
from client.UICardWrapper import UICardWrapper
import client.UIConstants as UIC
from common.Card import Card
class HandView:
"""This class handles player's cards and enables actions.
    Actions are primarily performed using buttons, since these need to be somewhat customized per game;
the buttons are in ***.py (*** is Liverpool or HandAndFoot) and are imported as RuleSetsButtons.
Management of displaying the hand's cards is not game specific, and methods that help with that
are in HandManagement.py.
Player can arrange their own hand, and prepare to play cards during other players' turns.
"""
def __init__(self, controller, display, ruleset):
self.controller = controller
self.display = display
self.ruleset = ruleset
self.Meld_Threshold = controller._state.rules.Meld_Threshold
self.deal_size = controller._state.rules.Deal_Size
self.help_text = controller._state.rules.help_text
if ruleset == 'Liverpool':
self.buttons_per_player = self.Meld_Threshold[0][0] + self.Meld_Threshold[0][1]
self.RuleSetsButtons = RuleSetsButtons_LP
elif ruleset == 'HandAndFoot':
self.RuleSetsButtons = RuleSetsButtons_HF
self.hand_scaling = (UIC.scale, UIC.Card_Spacing)
self.current_hand = []
self.last_hand = []
self.hand_info = [] # will contain UICardWrapped elements of current_hand
self.prepared_cards = [] # will contain list of prepared cards from controller
self.discards = []
self.discard_confirm = False
# num_wilds is HandAndFoot specific, only non-zero if by prepare_card_btn in HandAndFootButtons.py is triggered.
self.num_wilds = 0
self.wild_cards = []
self.selected_list = []
self.round_index = 0
self.round_advance = False
self.num_players = 1
# In Liverpool and other Shared_Board games: prepare cards buttons must be updated each round
self.need_updated_buttons = True
self.ready_color_idx = 2
self.not_ready_color_idx = 6
#
# if someone joins between rounds, then they won't know the correct meld requirement until the round begins.
# (self.controller._state.round = -1 until play commences).
# In HandAndFoot: Correct meld requirement will be written in lower right corner once play commences.
# In Liverpool: Will see correct buttons once round commences.
self.RuleSetsButtons.CreateButtons(self)
    def update(self, player_index=0, num_players=1, visible_scards=[]):
"""This updates the view of the hand, between rounds it displays a message. """
self.visible_scards = visible_scards
self.controller._state.player_index = player_index
if self.num_players > num_players and self.controller._state.rules.Shared_Board \
and not self.need_updated_buttons:
# A player has left the game after the round has begun -- make adjustments so game can continue.
self.playerLeftGame(num_players)
self.num_players = num_players
if self.controller._state.round == -1:
self.mesgBetweenRounds(self.help_text)
if self.round_advance:
self.round_index = self.round_index + 1
if self.round_index < len(self.Meld_Threshold):
self.help_text[0] = 'This is the round of ' + str(self.Meld_Threshold[self.round_index]) + ' ! '
self.need_updated_buttons = True # used for Liverpool.
else:
self.help_text = ['Game has concluded. Scores for each round can be found in command window.']
self.round_advance = False
else:
if not self.round_index == self.controller._state.round:
# Need this to true up round_index if a player joins mid-game.
skipped_rounds = self.controller._state.round - self.round_index
for idx in range(skipped_rounds):
#todo: How to score latecomers should be moved to ruleset.
score = 0
self.controller.lateJoinScores(score)
self.round_index = self.controller._state.round
self.round_advance = True
# reset outline colors on ready buttons to what they need to be at the start of the "between rounds" state.
self.ready_color_idx = 2
self.not_ready_color_idx = 6
self.last_hand = self.current_hand
self.current_hand = self.controller.getHand()
if len(self.current_hand) == 0:
self.hand_info = []
elif not self.last_hand == self.current_hand:
self.hand_info = HandManagement.WrapHand(self, self.current_hand, self.hand_info)
HandManagement.ShowHolding(self, self.hand_info) # displays hand
self.RuleSetsButtons.ButtonDisplay(self)
def nextEventWildsOnBoard(self):
"""This runs instead of most of nextEvent when Shared_Board is True and there are ambiguous wild cards.
It is looking for key strokes to designate ambiguous wild cards in runs.
The mouse is ignored until you designate all the wilds (turn phase goes back to play)."""
if self.controller._state.rules.Shared_Board and self.num_wilds > 0:
for self.event in pygame.event.get():
if self.event.type == pygame.QUIT:
# The window crashed, we should handle this
print("pygame crash, AAAHHH")
pygame.quit()
quit()
else:
# in Shared_Board games, check if there are wilds that need to be updated.
# All other events are ignored until play is finished.
HandManagement.wildsHiLoGetInput(self)
def nextEvent(self):
"""This submits the next user input to the controller,
In games with Shared_Board = False (e.g. HandAndFoot) key strokes don't do anything
unless designating values for prepared wild cards, at which time the mouse is ignored
unless you want to clear the prepared cards.
In games with Shared_Board = True wilds on board might change designation upon other cards being played.
IF designation cannot be handled automatically (= if wild can be at the beginning or end of a run) then
it must be designated before play is completed.
        This is done in nextEventWildsOnBoard. All other events are ignored until num_wilds == 0 OR play is canceled."""
if self.controller._state.rules.Shared_Board:
self.num_wilds = len(self.controller.unassigned_wilds_dict.keys())
if self.num_wilds > 0:
self.nextEventWildsOnBoard()
for self.event in pygame.event.get():
if self.event.type == pygame.QUIT:
# The window crashed, we should handle this
print("pygame crash, AAAHHH")
pygame.quit()
quit()
if not self.controller._state.rules.Shared_Board and self.num_wilds > 0:
wild_instructions = 'Use the keyboard to designate your prepared wild cards \r\n '
wild_instructions = wild_instructions + '(use 0 for 10 and J, Q, or K for facecards).'
self.controller.note = wild_instructions
pos = pygame.mouse.get_pos()
if self.event.type == pygame.MOUSEBUTTONDOWN:
self.RuleSetsButtons.ClickedButton(self, pos)
for element in self.hand_info:
# cannot select prepared cards, so not included in logic below.
if element.img_clickable.isOver(pos):
if element.status == 1:
element.status = 0
element.img_clickable.changeOutline(0)
elif element.status == 0:
element.status = 1
element.img_clickable.changeOutline(2)
elif self.event.type == pygame.MOUSEMOTION:
self.RuleSetsButtons.MouseHiLight(self, pos)
HandManagement.MouseHiLight(self.hand_info, pos)
elif self.event.type == pygame.KEYDOWN:
if self.controller._state.rules.Buy_Option:
if self.controller.buying_opportunity:
if self.event.key == pygame.K_y:
self.controller.wantTopCard(True)
self.controller.note = 'You have signaled you want to buy the card.'
elif self.event.key == pygame.K_n:
self.controller.wantTopCard(False)
self.controller.note = 'You have signaled you do not want to buy the card.'
if not self.controller._state.rules.Shared_Board and self.num_wilds > 0:
HandManagement.ManuallyAssign(self)
def gatherSelected(self):
""" gathers selected cards
in order to take action on selected cards (either discarding them or preparing them)
"""
self.selected_list = []
for element in self.hand_info:
if element.status == 1:
self.selected_list.append(element)
return self.selected_list
def discardConfirmation(self, confirmed, wrapped_discards):
""" Confirm a user is sure about a discard and then perform it once confirmed."""
discards = []
for element in wrapped_discards:
discards.append(element.card)
if self.discards != discards:
confirmed = False
self.discards = discards
if not confirmed:
self.controller.note = "Please confirm - discard " + "{0}".format(self.discards)
return True # ask for confirmation
else:
# confirmed is True, performing discard and removing discarded wrapped cards from hand_info.
if self.discard_confirm:
controller_response = self.controller.discard(self.discards)
if controller_response:
for element in wrapped_discards:
self.hand_info.remove(element)
return False # now that this is done, we don't have anything waiting on confirmation
def mesgBetweenRounds(self, message):
"""print message where cards usually displayed until Ready button is clicked for next round."""
font = UIC.Medium_Text
y_offset = (UIC.Disp_Height * (1 - (UIC.Hand_Row_Fraction * 0.8)))
for message_string in message:
text_surface = font.render(message_string, True, UIC.Black)
text_rect = text_surface.get_rect()
text_rect.center = ((UIC.Disp_Width * 0.5), y_offset)
y_offset = y_offset + UIC.Medium_Text_Feed
self.display.blit(text_surface, text_rect)
def labelMedium(self, labelstr, x_offset, y_offset):
font = UIC.Medium_Text
text_surface = font.render(labelstr, True, UIC.Bright_Blue)
text_rect = text_surface.get_rect()
text_rect.center = (x_offset, y_offset)
self.display.blit(text_surface, text_rect)
def playerLeftGame(self, num_players):
# a player has disconnected a game with a Shared_Board = True. Must make adjustments to
# (i) card group dictionaries, (ii) prepared cards & (iii) buttons locations.
self.controller.resetProcessedCards(self.visible_scards)
self.controller.clearPreparedCards() # so that prepared cards won't be mistakenly played on wrong group.
self.hand_info = HandManagement.ClearPreparedCardsInHandView(self.hand_info)
self.controller.note = "A player has left the game, all prepared cards are automatically cleared."
# reset set/run button locations:
if num_players > 1:
players_sp_w = UIC.Disp_Width / num_players
else:
players_sp_w = UIC.Disp_Width
for idx in range(num_players):
for button in self.assign_cards_btns[idx]:
button.x = 10 + (players_sp_w * idx)
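
# Assumed driving pattern (inferred from the methods above, not shown in this
# file): a game loop would call update(...) once per frame to redraw the hand
# and nextEvent() to process pending pygame input.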
|
normal
|
{
"blob_id": "1cdd315eec6792a8588dc2e6a221bc024be47078",
"index": 7885,
"step-1": "<mask token>\n\n\nclass HandView:\n <mask token>\n\n def __init__(self, controller, display, ruleset):\n self.controller = controller\n self.display = display\n self.ruleset = ruleset\n self.Meld_Threshold = controller._state.rules.Meld_Threshold\n self.deal_size = controller._state.rules.Deal_Size\n self.help_text = controller._state.rules.help_text\n if ruleset == 'Liverpool':\n self.buttons_per_player = self.Meld_Threshold[0][0\n ] + self.Meld_Threshold[0][1]\n self.RuleSetsButtons = RuleSetsButtons_LP\n elif ruleset == 'HandAndFoot':\n self.RuleSetsButtons = RuleSetsButtons_HF\n self.hand_scaling = UIC.scale, UIC.Card_Spacing\n self.current_hand = []\n self.last_hand = []\n self.hand_info = []\n self.prepared_cards = []\n self.discards = []\n self.discard_confirm = False\n self.num_wilds = 0\n self.wild_cards = []\n self.selected_list = []\n self.round_index = 0\n self.round_advance = False\n self.num_players = 1\n self.need_updated_buttons = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.RuleSetsButtons.CreateButtons(self)\n\n def update(self, player_index=0, num_players=1, visible_scards=[]):\n \"\"\"This updates the view of the hand, between rounds it displays a message. \"\"\"\n self.visible_scards = visible_scards\n self.controller._state.player_index = player_index\n if (self.num_players > num_players and self.controller._state.rules\n .Shared_Board and not self.need_updated_buttons):\n self.playerLeftGame(num_players)\n self.num_players = num_players\n if self.controller._state.round == -1:\n self.mesgBetweenRounds(self.help_text)\n if self.round_advance:\n self.round_index = self.round_index + 1\n if self.round_index < len(self.Meld_Threshold):\n self.help_text[0] = 'This is the round of ' + str(self.\n Meld_Threshold[self.round_index]) + ' ! '\n self.need_updated_buttons = True\n else:\n self.help_text = [\n 'Game has concluded. 
Scores for each round can be found in command window.'\n ]\n self.round_advance = False\n else:\n if not self.round_index == self.controller._state.round:\n skipped_rounds = (self.controller._state.round - self.\n round_index)\n for idx in range(skipped_rounds):\n score = 0\n self.controller.lateJoinScores(score)\n self.round_index = self.controller._state.round\n self.round_advance = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.last_hand = self.current_hand\n self.current_hand = self.controller.getHand()\n if len(self.current_hand) == 0:\n self.hand_info = []\n elif not self.last_hand == self.current_hand:\n self.hand_info = HandManagement.WrapHand(self, self.\n current_hand, self.hand_info)\n HandManagement.ShowHolding(self, self.hand_info)\n self.RuleSetsButtons.ButtonDisplay(self)\n\n def nextEventWildsOnBoard(self):\n \"\"\"This runs instead of most of nextEvent when Shared_Board is True and there are ambiguous wild cards.\n\n It is looking for key strokes to designate ambiguous wild cards in runs.\n The mouse is ignored until you designate all the wilds (turn phase goes back to play).\"\"\"\n if self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n print('pygame crash, AAAHHH')\n pygame.quit()\n quit()\n else:\n HandManagement.wildsHiLoGetInput(self)\n <mask token>\n <mask token>\n\n def discardConfirmation(self, confirmed, wrapped_discards):\n \"\"\" Confirm a user is sure about a discard and then perform it once confirmed.\"\"\"\n discards = []\n for element in wrapped_discards:\n discards.append(element.card)\n if self.discards != discards:\n confirmed = False\n self.discards = discards\n if not confirmed:\n self.controller.note = 'Please confirm - discard ' + '{0}'.format(\n self.discards)\n return True\n else:\n if self.discard_confirm:\n controller_response = self.controller.discard(self.discards)\n if controller_response:\n for element in wrapped_discards:\n self.hand_info.remove(element)\n return False\n <mask token>\n\n def labelMedium(self, labelstr, x_offset, y_offset):\n font = UIC.Medium_Text\n text_surface = font.render(labelstr, True, UIC.Bright_Blue)\n text_rect = text_surface.get_rect()\n text_rect.center = x_offset, y_offset\n self.display.blit(text_surface, text_rect)\n\n def playerLeftGame(self, num_players):\n self.controller.resetProcessedCards(self.visible_scards)\n self.controller.clearPreparedCards()\n self.hand_info = HandManagement.ClearPreparedCardsInHandView(self.\n hand_info)\n self.controller.note = (\n 'A player has left the game, all prepared cards are automatically cleared.'\n )\n if num_players > 1:\n players_sp_w = UIC.Disp_Width / num_players\n else:\n players_sp_w = UIC.Disp_Width\n for idx in range(num_players):\n for button in self.assign_cards_btns[idx]:\n button.x = 10 + players_sp_w * idx\n",
"step-2": "<mask token>\n\n\nclass HandView:\n <mask token>\n\n def __init__(self, controller, display, ruleset):\n self.controller = controller\n self.display = display\n self.ruleset = ruleset\n self.Meld_Threshold = controller._state.rules.Meld_Threshold\n self.deal_size = controller._state.rules.Deal_Size\n self.help_text = controller._state.rules.help_text\n if ruleset == 'Liverpool':\n self.buttons_per_player = self.Meld_Threshold[0][0\n ] + self.Meld_Threshold[0][1]\n self.RuleSetsButtons = RuleSetsButtons_LP\n elif ruleset == 'HandAndFoot':\n self.RuleSetsButtons = RuleSetsButtons_HF\n self.hand_scaling = UIC.scale, UIC.Card_Spacing\n self.current_hand = []\n self.last_hand = []\n self.hand_info = []\n self.prepared_cards = []\n self.discards = []\n self.discard_confirm = False\n self.num_wilds = 0\n self.wild_cards = []\n self.selected_list = []\n self.round_index = 0\n self.round_advance = False\n self.num_players = 1\n self.need_updated_buttons = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.RuleSetsButtons.CreateButtons(self)\n\n def update(self, player_index=0, num_players=1, visible_scards=[]):\n \"\"\"This updates the view of the hand, between rounds it displays a message. \"\"\"\n self.visible_scards = visible_scards\n self.controller._state.player_index = player_index\n if (self.num_players > num_players and self.controller._state.rules\n .Shared_Board and not self.need_updated_buttons):\n self.playerLeftGame(num_players)\n self.num_players = num_players\n if self.controller._state.round == -1:\n self.mesgBetweenRounds(self.help_text)\n if self.round_advance:\n self.round_index = self.round_index + 1\n if self.round_index < len(self.Meld_Threshold):\n self.help_text[0] = 'This is the round of ' + str(self.\n Meld_Threshold[self.round_index]) + ' ! '\n self.need_updated_buttons = True\n else:\n self.help_text = [\n 'Game has concluded. Scores for each round can be found in command window.'\n ]\n self.round_advance = False\n else:\n if not self.round_index == self.controller._state.round:\n skipped_rounds = (self.controller._state.round - self.\n round_index)\n for idx in range(skipped_rounds):\n score = 0\n self.controller.lateJoinScores(score)\n self.round_index = self.controller._state.round\n self.round_advance = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.last_hand = self.current_hand\n self.current_hand = self.controller.getHand()\n if len(self.current_hand) == 0:\n self.hand_info = []\n elif not self.last_hand == self.current_hand:\n self.hand_info = HandManagement.WrapHand(self, self.\n current_hand, self.hand_info)\n HandManagement.ShowHolding(self, self.hand_info)\n self.RuleSetsButtons.ButtonDisplay(self)\n\n def nextEventWildsOnBoard(self):\n \"\"\"This runs instead of most of nextEvent when Shared_Board is True and there are ambiguous wild cards.\n\n It is looking for key strokes to designate ambiguous wild cards in runs.\n The mouse is ignored until you designate all the wilds (turn phase goes back to play).\"\"\"\n if self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n print('pygame crash, AAAHHH')\n pygame.quit()\n quit()\n else:\n HandManagement.wildsHiLoGetInput(self)\n\n def nextEvent(self):\n \"\"\"This submits the next user input to the controller,\n\n In games with Shared_Board = False (e.g. 
HandAndFoot) key strokes don't do anything\n unless designating values for prepared wild cards, at which time the mouse is ignored\n unless you want to clear the prepared cards.\n In games with Shared_Board = True wilds on board might change designation upon other cards being played.\n IF designation cannot be handled automatically (= if wild can be at the beginning or end of a run) then\n it must be designated before play is completed.\n This is done in nextEvenWildsOnBoard. All other events are ignored until num_wilds == 0 OR play is canceled.\"\"\"\n if self.controller._state.rules.Shared_Board:\n self.num_wilds = len(self.controller.unassigned_wilds_dict.keys())\n if self.num_wilds > 0:\n self.nextEventWildsOnBoard()\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n print('pygame crash, AAAHHH')\n pygame.quit()\n quit()\n if (not self.controller._state.rules.Shared_Board and self.\n num_wilds > 0):\n wild_instructions = (\n 'Use the keyboard to designate your prepared wild cards \\r\\n '\n )\n wild_instructions = (wild_instructions +\n '(use 0 for 10 and J, Q, or K for facecards).')\n self.controller.note = wild_instructions\n pos = pygame.mouse.get_pos()\n if self.event.type == pygame.MOUSEBUTTONDOWN:\n self.RuleSetsButtons.ClickedButton(self, pos)\n for element in self.hand_info:\n if element.img_clickable.isOver(pos):\n if element.status == 1:\n element.status = 0\n element.img_clickable.changeOutline(0)\n elif element.status == 0:\n element.status = 1\n element.img_clickable.changeOutline(2)\n elif self.event.type == pygame.MOUSEMOTION:\n self.RuleSetsButtons.MouseHiLight(self, pos)\n HandManagement.MouseHiLight(self.hand_info, pos)\n elif self.event.type == pygame.KEYDOWN:\n if self.controller._state.rules.Buy_Option:\n if self.controller.buying_opportunity:\n if self.event.key == pygame.K_y:\n self.controller.wantTopCard(True)\n self.controller.note = (\n 'You have signaled you want to buy the card.')\n elif self.event.key == pygame.K_n:\n self.controller.wantTopCard(False)\n self.controller.note = (\n 'You have signaled you do not want to buy the card.'\n )\n if (not self.controller._state.rules.Shared_Board and self.\n num_wilds > 0):\n HandManagement.ManuallyAssign(self)\n\n def gatherSelected(self):\n \"\"\" gathers selected cards\n in order to take action on selected cards (either discarding them or preparing them)\n \"\"\"\n self.selected_list = []\n for element in self.hand_info:\n if element.status == 1:\n self.selected_list.append(element)\n return self.selected_list\n\n def discardConfirmation(self, confirmed, wrapped_discards):\n \"\"\" Confirm a user is sure about a discard and then perform it once confirmed.\"\"\"\n discards = []\n for element in wrapped_discards:\n discards.append(element.card)\n if self.discards != discards:\n confirmed = False\n self.discards = discards\n if not confirmed:\n self.controller.note = 'Please confirm - discard ' + '{0}'.format(\n self.discards)\n return True\n else:\n if self.discard_confirm:\n controller_response = self.controller.discard(self.discards)\n if controller_response:\n for element in wrapped_discards:\n self.hand_info.remove(element)\n return False\n <mask token>\n\n def labelMedium(self, labelstr, x_offset, y_offset):\n font = UIC.Medium_Text\n text_surface = font.render(labelstr, True, UIC.Bright_Blue)\n text_rect = text_surface.get_rect()\n text_rect.center = x_offset, y_offset\n self.display.blit(text_surface, text_rect)\n\n def playerLeftGame(self, num_players):\n 
self.controller.resetProcessedCards(self.visible_scards)\n self.controller.clearPreparedCards()\n self.hand_info = HandManagement.ClearPreparedCardsInHandView(self.\n hand_info)\n self.controller.note = (\n 'A player has left the game, all prepared cards are automatically cleared.'\n )\n if num_players > 1:\n players_sp_w = UIC.Disp_Width / num_players\n else:\n players_sp_w = UIC.Disp_Width\n for idx in range(num_players):\n for button in self.assign_cards_btns[idx]:\n button.x = 10 + players_sp_w * idx\n",
"step-3": "<mask token>\n\n\nclass HandView:\n \"\"\"This class handles player's cards and enables actions.\n\n Actions are primarily performed using buttons, since these need to somewhat customized by game\n the buttons are in ***.py (*** is Liverpool or HandAndFoot) and are imported as RuleSetsButtons.\n Management of displaying the hand's cards is not game specific, and methods that help with that\n are in HandManagement.py.\n\n Player can arrange their own hand, and prepare to play cards during other players' turns.\n \"\"\"\n\n def __init__(self, controller, display, ruleset):\n self.controller = controller\n self.display = display\n self.ruleset = ruleset\n self.Meld_Threshold = controller._state.rules.Meld_Threshold\n self.deal_size = controller._state.rules.Deal_Size\n self.help_text = controller._state.rules.help_text\n if ruleset == 'Liverpool':\n self.buttons_per_player = self.Meld_Threshold[0][0\n ] + self.Meld_Threshold[0][1]\n self.RuleSetsButtons = RuleSetsButtons_LP\n elif ruleset == 'HandAndFoot':\n self.RuleSetsButtons = RuleSetsButtons_HF\n self.hand_scaling = UIC.scale, UIC.Card_Spacing\n self.current_hand = []\n self.last_hand = []\n self.hand_info = []\n self.prepared_cards = []\n self.discards = []\n self.discard_confirm = False\n self.num_wilds = 0\n self.wild_cards = []\n self.selected_list = []\n self.round_index = 0\n self.round_advance = False\n self.num_players = 1\n self.need_updated_buttons = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.RuleSetsButtons.CreateButtons(self)\n\n def update(self, player_index=0, num_players=1, visible_scards=[]):\n \"\"\"This updates the view of the hand, between rounds it displays a message. \"\"\"\n self.visible_scards = visible_scards\n self.controller._state.player_index = player_index\n if (self.num_players > num_players and self.controller._state.rules\n .Shared_Board and not self.need_updated_buttons):\n self.playerLeftGame(num_players)\n self.num_players = num_players\n if self.controller._state.round == -1:\n self.mesgBetweenRounds(self.help_text)\n if self.round_advance:\n self.round_index = self.round_index + 1\n if self.round_index < len(self.Meld_Threshold):\n self.help_text[0] = 'This is the round of ' + str(self.\n Meld_Threshold[self.round_index]) + ' ! '\n self.need_updated_buttons = True\n else:\n self.help_text = [\n 'Game has concluded. 
Scores for each round can be found in command window.'\n ]\n self.round_advance = False\n else:\n if not self.round_index == self.controller._state.round:\n skipped_rounds = (self.controller._state.round - self.\n round_index)\n for idx in range(skipped_rounds):\n score = 0\n self.controller.lateJoinScores(score)\n self.round_index = self.controller._state.round\n self.round_advance = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.last_hand = self.current_hand\n self.current_hand = self.controller.getHand()\n if len(self.current_hand) == 0:\n self.hand_info = []\n elif not self.last_hand == self.current_hand:\n self.hand_info = HandManagement.WrapHand(self, self.\n current_hand, self.hand_info)\n HandManagement.ShowHolding(self, self.hand_info)\n self.RuleSetsButtons.ButtonDisplay(self)\n\n def nextEventWildsOnBoard(self):\n \"\"\"This runs instead of most of nextEvent when Shared_Board is True and there are ambiguous wild cards.\n\n It is looking for key strokes to designate ambiguous wild cards in runs.\n The mouse is ignored until you designate all the wilds (turn phase goes back to play).\"\"\"\n if self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n print('pygame crash, AAAHHH')\n pygame.quit()\n quit()\n else:\n HandManagement.wildsHiLoGetInput(self)\n\n def nextEvent(self):\n \"\"\"This submits the next user input to the controller,\n\n In games with Shared_Board = False (e.g. HandAndFoot) key strokes don't do anything\n unless designating values for prepared wild cards, at which time the mouse is ignored\n unless you want to clear the prepared cards.\n In games with Shared_Board = True wilds on board might change designation upon other cards being played.\n IF designation cannot be handled automatically (= if wild can be at the beginning or end of a run) then\n it must be designated before play is completed.\n This is done in nextEvenWildsOnBoard. 
All other events are ignored until num_wilds == 0 OR play is canceled.\"\"\"\n if self.controller._state.rules.Shared_Board:\n self.num_wilds = len(self.controller.unassigned_wilds_dict.keys())\n if self.num_wilds > 0:\n self.nextEventWildsOnBoard()\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n print('pygame crash, AAAHHH')\n pygame.quit()\n quit()\n if (not self.controller._state.rules.Shared_Board and self.\n num_wilds > 0):\n wild_instructions = (\n 'Use the keyboard to designate your prepared wild cards \\r\\n '\n )\n wild_instructions = (wild_instructions +\n '(use 0 for 10 and J, Q, or K for facecards).')\n self.controller.note = wild_instructions\n pos = pygame.mouse.get_pos()\n if self.event.type == pygame.MOUSEBUTTONDOWN:\n self.RuleSetsButtons.ClickedButton(self, pos)\n for element in self.hand_info:\n if element.img_clickable.isOver(pos):\n if element.status == 1:\n element.status = 0\n element.img_clickable.changeOutline(0)\n elif element.status == 0:\n element.status = 1\n element.img_clickable.changeOutline(2)\n elif self.event.type == pygame.MOUSEMOTION:\n self.RuleSetsButtons.MouseHiLight(self, pos)\n HandManagement.MouseHiLight(self.hand_info, pos)\n elif self.event.type == pygame.KEYDOWN:\n if self.controller._state.rules.Buy_Option:\n if self.controller.buying_opportunity:\n if self.event.key == pygame.K_y:\n self.controller.wantTopCard(True)\n self.controller.note = (\n 'You have signaled you want to buy the card.')\n elif self.event.key == pygame.K_n:\n self.controller.wantTopCard(False)\n self.controller.note = (\n 'You have signaled you do not want to buy the card.'\n )\n if (not self.controller._state.rules.Shared_Board and self.\n num_wilds > 0):\n HandManagement.ManuallyAssign(self)\n\n def gatherSelected(self):\n \"\"\" gathers selected cards\n in order to take action on selected cards (either discarding them or preparing them)\n \"\"\"\n self.selected_list = []\n for element in self.hand_info:\n if element.status == 1:\n self.selected_list.append(element)\n return self.selected_list\n\n def discardConfirmation(self, confirmed, wrapped_discards):\n \"\"\" Confirm a user is sure about a discard and then perform it once confirmed.\"\"\"\n discards = []\n for element in wrapped_discards:\n discards.append(element.card)\n if self.discards != discards:\n confirmed = False\n self.discards = discards\n if not confirmed:\n self.controller.note = 'Please confirm - discard ' + '{0}'.format(\n self.discards)\n return True\n else:\n if self.discard_confirm:\n controller_response = self.controller.discard(self.discards)\n if controller_response:\n for element in wrapped_discards:\n self.hand_info.remove(element)\n return False\n\n def mesgBetweenRounds(self, message):\n \"\"\"print message where cards usually displayed until Ready button is clicked for next round.\"\"\"\n font = UIC.Medium_Text\n y_offset = UIC.Disp_Height * (1 - UIC.Hand_Row_Fraction * 0.8)\n for message_string in message:\n text_surface = font.render(message_string, True, UIC.Black)\n text_rect = text_surface.get_rect()\n text_rect.center = UIC.Disp_Width * 0.5, y_offset\n y_offset = y_offset + UIC.Medium_Text_Feed\n self.display.blit(text_surface, text_rect)\n\n def labelMedium(self, labelstr, x_offset, y_offset):\n font = UIC.Medium_Text\n text_surface = font.render(labelstr, True, UIC.Bright_Blue)\n text_rect = text_surface.get_rect()\n text_rect.center = x_offset, y_offset\n self.display.blit(text_surface, text_rect)\n\n def playerLeftGame(self, num_players):\n 
self.controller.resetProcessedCards(self.visible_scards)\n self.controller.clearPreparedCards()\n self.hand_info = HandManagement.ClearPreparedCardsInHandView(self.\n hand_info)\n self.controller.note = (\n 'A player has left the game, all prepared cards are automatically cleared.'\n )\n if num_players > 1:\n players_sp_w = UIC.Disp_Width / num_players\n else:\n players_sp_w = UIC.Disp_Width\n for idx in range(num_players):\n for button in self.assign_cards_btns[idx]:\n button.x = 10 + players_sp_w * idx\n",
"step-4": "import pygame\nimport textwrap\nimport client.Button as Btn\nfrom client.ClickableImage import ClickableImage as ClickImg\nfrom client.CreateDisplay import CreateDisplay\nimport client.LiverpoolButtons as RuleSetsButtons_LP\nimport client.HandAndFootButtons as RuleSetsButtons_HF\nimport client.HandManagement as HandManagement\nfrom client.UICardWrapper import UICardWrapper\nimport client.UIConstants as UIC\nfrom common.Card import Card\n\n\nclass HandView:\n \"\"\"This class handles player's cards and enables actions.\n\n Actions are primarily performed using buttons, since these need to somewhat customized by game\n the buttons are in ***.py (*** is Liverpool or HandAndFoot) and are imported as RuleSetsButtons.\n Management of displaying the hand's cards is not game specific, and methods that help with that\n are in HandManagement.py.\n\n Player can arrange their own hand, and prepare to play cards during other players' turns.\n \"\"\"\n\n def __init__(self, controller, display, ruleset):\n self.controller = controller\n self.display = display\n self.ruleset = ruleset\n self.Meld_Threshold = controller._state.rules.Meld_Threshold\n self.deal_size = controller._state.rules.Deal_Size\n self.help_text = controller._state.rules.help_text\n if ruleset == 'Liverpool':\n self.buttons_per_player = self.Meld_Threshold[0][0\n ] + self.Meld_Threshold[0][1]\n self.RuleSetsButtons = RuleSetsButtons_LP\n elif ruleset == 'HandAndFoot':\n self.RuleSetsButtons = RuleSetsButtons_HF\n self.hand_scaling = UIC.scale, UIC.Card_Spacing\n self.current_hand = []\n self.last_hand = []\n self.hand_info = []\n self.prepared_cards = []\n self.discards = []\n self.discard_confirm = False\n self.num_wilds = 0\n self.wild_cards = []\n self.selected_list = []\n self.round_index = 0\n self.round_advance = False\n self.num_players = 1\n self.need_updated_buttons = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.RuleSetsButtons.CreateButtons(self)\n\n def update(self, player_index=0, num_players=1, visible_scards=[]):\n \"\"\"This updates the view of the hand, between rounds it displays a message. \"\"\"\n self.visible_scards = visible_scards\n self.controller._state.player_index = player_index\n if (self.num_players > num_players and self.controller._state.rules\n .Shared_Board and not self.need_updated_buttons):\n self.playerLeftGame(num_players)\n self.num_players = num_players\n if self.controller._state.round == -1:\n self.mesgBetweenRounds(self.help_text)\n if self.round_advance:\n self.round_index = self.round_index + 1\n if self.round_index < len(self.Meld_Threshold):\n self.help_text[0] = 'This is the round of ' + str(self.\n Meld_Threshold[self.round_index]) + ' ! '\n self.need_updated_buttons = True\n else:\n self.help_text = [\n 'Game has concluded. 
Scores for each round can be found in command window.'\n ]\n self.round_advance = False\n else:\n if not self.round_index == self.controller._state.round:\n skipped_rounds = (self.controller._state.round - self.\n round_index)\n for idx in range(skipped_rounds):\n score = 0\n self.controller.lateJoinScores(score)\n self.round_index = self.controller._state.round\n self.round_advance = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.last_hand = self.current_hand\n self.current_hand = self.controller.getHand()\n if len(self.current_hand) == 0:\n self.hand_info = []\n elif not self.last_hand == self.current_hand:\n self.hand_info = HandManagement.WrapHand(self, self.\n current_hand, self.hand_info)\n HandManagement.ShowHolding(self, self.hand_info)\n self.RuleSetsButtons.ButtonDisplay(self)\n\n def nextEventWildsOnBoard(self):\n \"\"\"This runs instead of most of nextEvent when Shared_Board is True and there are ambiguous wild cards.\n\n It is looking for key strokes to designate ambiguous wild cards in runs.\n The mouse is ignored until you designate all the wilds (turn phase goes back to play).\"\"\"\n if self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n print('pygame crash, AAAHHH')\n pygame.quit()\n quit()\n else:\n HandManagement.wildsHiLoGetInput(self)\n\n def nextEvent(self):\n \"\"\"This submits the next user input to the controller,\n\n In games with Shared_Board = False (e.g. HandAndFoot) key strokes don't do anything\n unless designating values for prepared wild cards, at which time the mouse is ignored\n unless you want to clear the prepared cards.\n In games with Shared_Board = True wilds on board might change designation upon other cards being played.\n IF designation cannot be handled automatically (= if wild can be at the beginning or end of a run) then\n it must be designated before play is completed.\n This is done in nextEvenWildsOnBoard. 
All other events are ignored until num_wilds == 0 OR play is canceled.\"\"\"\n if self.controller._state.rules.Shared_Board:\n self.num_wilds = len(self.controller.unassigned_wilds_dict.keys())\n if self.num_wilds > 0:\n self.nextEventWildsOnBoard()\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n print('pygame crash, AAAHHH')\n pygame.quit()\n quit()\n if (not self.controller._state.rules.Shared_Board and self.\n num_wilds > 0):\n wild_instructions = (\n 'Use the keyboard to designate your prepared wild cards \\r\\n '\n )\n wild_instructions = (wild_instructions +\n '(use 0 for 10 and J, Q, or K for facecards).')\n self.controller.note = wild_instructions\n pos = pygame.mouse.get_pos()\n if self.event.type == pygame.MOUSEBUTTONDOWN:\n self.RuleSetsButtons.ClickedButton(self, pos)\n for element in self.hand_info:\n if element.img_clickable.isOver(pos):\n if element.status == 1:\n element.status = 0\n element.img_clickable.changeOutline(0)\n elif element.status == 0:\n element.status = 1\n element.img_clickable.changeOutline(2)\n elif self.event.type == pygame.MOUSEMOTION:\n self.RuleSetsButtons.MouseHiLight(self, pos)\n HandManagement.MouseHiLight(self.hand_info, pos)\n elif self.event.type == pygame.KEYDOWN:\n if self.controller._state.rules.Buy_Option:\n if self.controller.buying_opportunity:\n if self.event.key == pygame.K_y:\n self.controller.wantTopCard(True)\n self.controller.note = (\n 'You have signaled you want to buy the card.')\n elif self.event.key == pygame.K_n:\n self.controller.wantTopCard(False)\n self.controller.note = (\n 'You have signaled you do not want to buy the card.'\n )\n if (not self.controller._state.rules.Shared_Board and self.\n num_wilds > 0):\n HandManagement.ManuallyAssign(self)\n\n def gatherSelected(self):\n \"\"\" gathers selected cards\n in order to take action on selected cards (either discarding them or preparing them)\n \"\"\"\n self.selected_list = []\n for element in self.hand_info:\n if element.status == 1:\n self.selected_list.append(element)\n return self.selected_list\n\n def discardConfirmation(self, confirmed, wrapped_discards):\n \"\"\" Confirm a user is sure about a discard and then perform it once confirmed.\"\"\"\n discards = []\n for element in wrapped_discards:\n discards.append(element.card)\n if self.discards != discards:\n confirmed = False\n self.discards = discards\n if not confirmed:\n self.controller.note = 'Please confirm - discard ' + '{0}'.format(\n self.discards)\n return True\n else:\n if self.discard_confirm:\n controller_response = self.controller.discard(self.discards)\n if controller_response:\n for element in wrapped_discards:\n self.hand_info.remove(element)\n return False\n\n def mesgBetweenRounds(self, message):\n \"\"\"print message where cards usually displayed until Ready button is clicked for next round.\"\"\"\n font = UIC.Medium_Text\n y_offset = UIC.Disp_Height * (1 - UIC.Hand_Row_Fraction * 0.8)\n for message_string in message:\n text_surface = font.render(message_string, True, UIC.Black)\n text_rect = text_surface.get_rect()\n text_rect.center = UIC.Disp_Width * 0.5, y_offset\n y_offset = y_offset + UIC.Medium_Text_Feed\n self.display.blit(text_surface, text_rect)\n\n def labelMedium(self, labelstr, x_offset, y_offset):\n font = UIC.Medium_Text\n text_surface = font.render(labelstr, True, UIC.Bright_Blue)\n text_rect = text_surface.get_rect()\n text_rect.center = x_offset, y_offset\n self.display.blit(text_surface, text_rect)\n\n def playerLeftGame(self, num_players):\n 
self.controller.resetProcessedCards(self.visible_scards)\n self.controller.clearPreparedCards()\n self.hand_info = HandManagement.ClearPreparedCardsInHandView(self.\n hand_info)\n self.controller.note = (\n 'A player has left the game, all prepared cards are automatically cleared.'\n )\n if num_players > 1:\n players_sp_w = UIC.Disp_Width / num_players\n else:\n players_sp_w = UIC.Disp_Width\n for idx in range(num_players):\n for button in self.assign_cards_btns[idx]:\n button.x = 10 + players_sp_w * idx\n",
"step-5": "import pygame\nimport textwrap\nimport client.Button as Btn\nfrom client.ClickableImage import ClickableImage as ClickImg\nfrom client.CreateDisplay import CreateDisplay\nimport client.LiverpoolButtons as RuleSetsButtons_LP\nimport client.HandAndFootButtons as RuleSetsButtons_HF\nimport client.HandManagement as HandManagement\nfrom client.UICardWrapper import UICardWrapper\nimport client.UIConstants as UIC\nfrom common.Card import Card\n\n\nclass HandView:\n \"\"\"This class handles player's cards and enables actions.\n\n Actions are primarily performed using buttons, since these need to somewhat customized by game\n the buttons are in ***.py (*** is Liverpool or HandAndFoot) and are imported as RuleSetsButtons.\n Management of displaying the hand's cards is not game specific, and methods that help with that\n are in HandManagement.py.\n\n Player can arrange their own hand, and prepare to play cards during other players' turns.\n \"\"\"\n def __init__(self, controller, display, ruleset):\n self.controller = controller\n self.display = display\n self.ruleset = ruleset\n self.Meld_Threshold = controller._state.rules.Meld_Threshold\n self.deal_size = controller._state.rules.Deal_Size\n self.help_text = controller._state.rules.help_text\n if ruleset == 'Liverpool':\n self.buttons_per_player = self.Meld_Threshold[0][0] + self.Meld_Threshold[0][1]\n self.RuleSetsButtons = RuleSetsButtons_LP\n elif ruleset == 'HandAndFoot':\n self.RuleSetsButtons = RuleSetsButtons_HF\n self.hand_scaling = (UIC.scale, UIC.Card_Spacing)\n self.current_hand = []\n self.last_hand = []\n self.hand_info = [] # will contain UICardWrapped elements of current_hand\n self.prepared_cards = [] # will contain list of prepared cards from controller\n self.discards = []\n self.discard_confirm = False\n # num_wilds is HandAndFoot specific, only non-zero if by prepare_card_btn in HandAndFootButtons.py is triggered.\n self.num_wilds = 0\n self.wild_cards = []\n self.selected_list = []\n self.round_index = 0\n self.round_advance = False\n self.num_players = 1\n # In Liverpool and other Shared_Board games: prepare cards buttons must be updated each round\n self.need_updated_buttons = True\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n #\n # if someone joins between rounds, then they won't know the correct meld requirement until the round begins.\n # (self.controller._state.round = -1 until play commences).\n # In HandAndFoot: Correct meld requirement will be written in lower right corner once play commences.\n # In Liverpool: Will see correct buttons once round commences.\n self.RuleSetsButtons.CreateButtons(self)\n\n def update(self, player_index=0, num_players=1, visible_scards = []):\n \"\"\"This updates the view of the hand, between rounds it displays a message. \"\"\"\n\n self.visible_scards = visible_scards\n self.controller._state.player_index = player_index\n if self.num_players > num_players and self.controller._state.rules.Shared_Board \\\n and not self.need_updated_buttons:\n # A player has left the game after the round has begun -- make adjustments so game can continue.\n self.playerLeftGame(num_players)\n self.num_players = num_players\n if self.controller._state.round == -1:\n self.mesgBetweenRounds(self.help_text)\n if self.round_advance:\n self.round_index = self.round_index + 1\n if self.round_index < len(self.Meld_Threshold):\n self.help_text[0] = 'This is the round of ' + str(self.Meld_Threshold[self.round_index]) + ' ! 
'\n self.need_updated_buttons = True # used for Liverpool.\n else:\n self.help_text = ['Game has concluded. Scores for each round can be found in command window.']\n self.round_advance = False\n else:\n if not self.round_index == self.controller._state.round:\n # Need this to true up round_index if a player joins mid-game.\n skipped_rounds = self.controller._state.round - self.round_index\n for idx in range(skipped_rounds):\n #todo: How to score latecomers should be moved to ruleset.\n score = 0\n self.controller.lateJoinScores(score)\n self.round_index = self.controller._state.round\n self.round_advance = True\n # reset outline colors on ready buttons to what they need to be at the start of the \"between rounds\" state.\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.last_hand = self.current_hand\n self.current_hand = self.controller.getHand()\n if len(self.current_hand) == 0:\n self.hand_info = []\n elif not self.last_hand == self.current_hand:\n self.hand_info = HandManagement.WrapHand(self, self.current_hand, self.hand_info)\n HandManagement.ShowHolding(self, self.hand_info) # displays hand\n self.RuleSetsButtons.ButtonDisplay(self)\n\n def nextEventWildsOnBoard(self):\n \"\"\"This runs instead of most of nextEvent when Shared_Board is True and there are ambiguous wild cards.\n\n It is looking for key strokes to designate ambiguous wild cards in runs.\n The mouse is ignored until you designate all the wilds (turn phase goes back to play).\"\"\"\n\n if self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n # The window crashed, we should handle this\n print(\"pygame crash, AAAHHH\")\n pygame.quit()\n quit()\n else:\n # in Shared_Board games, check if there are wilds that need to be updated.\n # All other events are ignored until play is finished.\n HandManagement.wildsHiLoGetInput(self)\n\n def nextEvent(self):\n \"\"\"This submits the next user input to the controller,\n\n In games with Shared_Board = False (e.g. HandAndFoot) key strokes don't do anything\n unless designating values for prepared wild cards, at which time the mouse is ignored\n unless you want to clear the prepared cards.\n In games with Shared_Board = True wilds on board might change designation upon other cards being played.\n IF designation cannot be handled automatically (= if wild can be at the beginning or end of a run) then\n it must be designated before play is completed.\n This is done in nextEvenWildsOnBoard. 
All other events are ignored until num_wilds == 0 OR play is canceled.\"\"\"\n\n if self.controller._state.rules.Shared_Board:\n self.num_wilds = len(self.controller.unassigned_wilds_dict.keys())\n if self.num_wilds > 0:\n self.nextEventWildsOnBoard()\n\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n # The window crashed, we should handle this\n print(\"pygame crash, AAAHHH\")\n pygame.quit()\n quit()\n\n if not self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n wild_instructions = 'Use the keyboard to designate your prepared wild cards \\r\\n '\n wild_instructions = wild_instructions + '(use 0 for 10 and J, Q, or K for facecards).'\n self.controller.note = wild_instructions\n pos = pygame.mouse.get_pos()\n\n if self.event.type == pygame.MOUSEBUTTONDOWN:\n self.RuleSetsButtons.ClickedButton(self, pos)\n for element in self.hand_info:\n # cannot select prepared cards, so not included in logic below.\n if element.img_clickable.isOver(pos):\n if element.status == 1:\n element.status = 0\n element.img_clickable.changeOutline(0)\n elif element.status == 0:\n element.status = 1\n element.img_clickable.changeOutline(2)\n\n elif self.event.type == pygame.MOUSEMOTION:\n self.RuleSetsButtons.MouseHiLight(self, pos)\n HandManagement.MouseHiLight(self.hand_info, pos)\n elif self.event.type == pygame.KEYDOWN:\n if self.controller._state.rules.Buy_Option:\n if self.controller.buying_opportunity:\n if self.event.key == pygame.K_y:\n self.controller.wantTopCard(True)\n self.controller.note = 'You have signaled you want to buy the card.'\n elif self.event.key == pygame.K_n:\n self.controller.wantTopCard(False)\n self.controller.note = 'You have signaled you do not want to buy the card.'\n if not self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n HandManagement.ManuallyAssign(self)\n\n\n def gatherSelected(self):\n \"\"\" gathers selected cards\n in order to take action on selected cards (either discarding them or preparing them)\n \"\"\"\n self.selected_list = []\n for element in self.hand_info:\n if element.status == 1:\n self.selected_list.append(element)\n return self.selected_list\n\n def discardConfirmation(self, confirmed, wrapped_discards):\n \"\"\" Confirm a user is sure about a discard and then perform it once confirmed.\"\"\"\n discards = []\n for element in wrapped_discards:\n discards.append(element.card)\n if self.discards != discards:\n confirmed = False\n self.discards = discards\n if not confirmed:\n self.controller.note = \"Please confirm - discard \" + \"{0}\".format(self.discards)\n return True # ask for confirmation\n else:\n # confirmed is True, performing discard and removing discarded wrapped cards from hand_info.\n if self.discard_confirm:\n controller_response = self.controller.discard(self.discards)\n if controller_response:\n for element in wrapped_discards:\n self.hand_info.remove(element)\n return False # now that this is done, we don't have anything waiting on confirmation\n\n def mesgBetweenRounds(self, message):\n \"\"\"print message where cards usually displayed until Ready button is clicked for next round.\"\"\"\n font = UIC.Medium_Text\n y_offset = (UIC.Disp_Height * (1 - (UIC.Hand_Row_Fraction * 0.8)))\n for message_string in message:\n text_surface = font.render(message_string, True, UIC.Black)\n text_rect = text_surface.get_rect()\n text_rect.center = ((UIC.Disp_Width * 0.5), y_offset)\n y_offset = y_offset + UIC.Medium_Text_Feed\n self.display.blit(text_surface, text_rect)\n\n def labelMedium(self, 
labelstr, x_offset, y_offset):\n font = UIC.Medium_Text\n text_surface = font.render(labelstr, True, UIC.Bright_Blue)\n text_rect = text_surface.get_rect()\n text_rect.center = (x_offset, y_offset)\n self.display.blit(text_surface, text_rect)\n\n def playerLeftGame(self, num_players):\n # a player has disconnected a game with a Shared_Board = True. Must make adjustments to\n # (i) card group dictionaries, (ii) prepared cards & (iii) buttons locations.\n self.controller.resetProcessedCards(self.visible_scards)\n self.controller.clearPreparedCards() # so that prepared cards won't be mistakenly played on wrong group.\n self.hand_info = HandManagement.ClearPreparedCardsInHandView(self.hand_info)\n self.controller.note = \"A player has left the game, all prepared cards are automatically cleared.\"\n # reset set/run button locations:\n if num_players > 1:\n players_sp_w = UIC.Disp_Width / num_players\n else:\n players_sp_w = UIC.Disp_Width\n for idx in range(num_players):\n for button in self.assign_cards_btns[idx]:\n button.x = 10 + (players_sp_w * idx)\n",
"step-ids": [
7,
9,
11,
12,
13
]
}
|
[
7,
9,
11,
12,
13
] |
print('hello world123')
|
normal
|
{
"blob_id": "004a02f7ff49cb1b63ebedfcfcb4937377859099",
"index": 1187,
"step-1": "<mask token>\n",
"step-2": "print('hello world123')\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# Generated by Django 3.2.3 on 2021-07-24 12:14
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('profiles', '0018_userprofile_membership_fee_pending'),
]
operations = [
migrations.RenameField(
model_name='userprofile',
old_name='membership_fee_pending',
new_name='membership_fee_paid',
),
]
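
    # RenameField issues a column rename (no data is copied), so existing
    # 'membership_fee_pending' values carry over under the new name.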
|
normal
|
{
"blob_id": "464980a2f17aeedfa08548d6c4e247f8c047e2cb",
"index": 5743,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('profiles', '0018_userprofile_membership_fee_pending')]\n operations = [migrations.RenameField(model_name='userprofile', old_name\n ='membership_fee_pending', new_name='membership_fee_paid')]\n",
"step-4": "from django.db import migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = [('profiles', '0018_userprofile_membership_fee_pending')]\n operations = [migrations.RenameField(model_name='userprofile', old_name\n ='membership_fee_pending', new_name='membership_fee_paid')]\n",
"step-5": "# Generated by Django 3.2.3 on 2021-07-24 12:14\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('profiles', '0018_userprofile_membership_fee_pending'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='userprofile',\n old_name='membership_fee_pending',\n new_name='membership_fee_paid',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import logging
class ConsoleLogger:
handlers = [
(logging.StreamHandler,
dict(),
"[%(name)s]\t %(asctime)s [%(levelname)s] %(message)s ",
logging.DEBUG)
]
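    # Each handlers entry is a tuple of (handler class, constructor kwargs,
    # format string, fallback level); __init__ below builds one logging handler
    # per tuple and attaches it to the logger.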
def set_level(self, level):
self.logger.setLevel(level)
def debug(self, message):
self.logger.debug(message)
def info(self, message):
self.logger.info(message)
def warning(self, message):
self.logger.warning(message)
def error(self, message):
self.logger.error(message)
def exception(self, message):
self.logger.exception(message)
def __init__(self, name=__name__, default_level=logging.DEBUG):
self.logger = logging.Logger(name)
if not self.logger.handlers or len(self.logger.handlers) < 1:
for handler_class, params, formatted, level in self.handlers:
handler = handler_class(**params)
handler.setFormatter(logging.Formatter(formatted))
handler.setLevel(level if not default_level else default_level)
self.logger.addHandler(handler)
|
normal
|
{
"blob_id": "5299f2c66fd287be667ecbe11b8470263eafab5c",
"index": 702,
"step-1": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-2": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n <mask token>\n <mask token>\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-3": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n\n def warning(self, message):\n self.logger.warning(message)\n <mask token>\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-4": "<mask token>\n\n\nclass ConsoleLogger:\n <mask token>\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n\n def warning(self, message):\n self.logger.warning(message)\n\n def error(self, message):\n self.logger.error(message)\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n self.logger.addHandler(handler)\n",
"step-5": "import logging\n\n\nclass ConsoleLogger:\n\n handlers = [\n (logging.StreamHandler,\n dict(),\n \"[%(name)s]\\t %(asctime)s [%(levelname)s] %(message)s \",\n logging.DEBUG)\n ]\n\n def set_level(self, level):\n self.logger.setLevel(level)\n\n def debug(self, message):\n self.logger.debug(message)\n\n def info(self, message):\n self.logger.info(message)\n\n def warning(self, message):\n self.logger.warning(message)\n\n def error(self, message):\n self.logger.error(message)\n\n def exception(self, message):\n self.logger.exception(message)\n\n def __init__(self, name=__name__, default_level=logging.DEBUG):\n self.logger = logging.Logger(name)\n if not self.logger.handlers or len(self.logger.handlers) < 1:\n for handler_class, params, formatted, level in self.handlers:\n handler = handler_class(**params)\n handler.setFormatter(logging.Formatter(formatted))\n handler.setLevel(level if not default_level else default_level)\n\n self.logger.addHandler(handler)\n",
"step-ids": [
3,
6,
7,
8,
11
]
}
|
[
3,
6,
7,
8,
11
] |
from os import environ
from flask import Flask
from flask_restful import Api
from flask_migrate import Migrate
from applications.db import db
from applications.gamma_api import add_module_gamma
app = Flask(__name__)
app.config["DEBUG"] = True
app.config['SQLALCHEMY_DATABASE_URI'] = environ.get('DATABASE')
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config["PROPAGATE_EXCEPTIONS"] = True
api = Api(app)
db.init_app(app)
migrate = Migrate(app, db)
@app.before_first_request
def create_tables():
pass
# db.create_all()
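    # Left as a no-op: with Flask-Migrate configured above, the schema is
    # presumably managed through migrations rather than db.create_all().
    # (Note: before_first_request itself is deprecated as of Flask 2.2.)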
add_module_gamma(api)
|
normal
|
{
"blob_id": "fbb081fd52b14336ab4537bb795105bcd6a03070",
"index": 3045,
"step-1": "<mask token>\n\n\[email protected]_first_request\ndef create_tables():\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\ndb.init_app(app)\n<mask token>\n\n\[email protected]_first_request\ndef create_tables():\n pass\n\n\nadd_module_gamma(api)\n",
"step-3": "<mask token>\napp = Flask(__name__)\napp.config['DEBUG'] = True\napp.config['SQLALCHEMY_DATABASE_URI'] = environ.get('DATABASE')\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['PROPAGATE_EXCEPTIONS'] = True\napi = Api(app)\ndb.init_app(app)\nmigrate = Migrate(app, db)\n\n\[email protected]_first_request\ndef create_tables():\n pass\n\n\nadd_module_gamma(api)\n",
"step-4": "from os import environ\nfrom flask import Flask\nfrom flask_restful import Api\nfrom flask_migrate import Migrate\nfrom applications.db import db\nfrom applications.gamma_api import add_module_gamma\napp = Flask(__name__)\napp.config['DEBUG'] = True\napp.config['SQLALCHEMY_DATABASE_URI'] = environ.get('DATABASE')\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['PROPAGATE_EXCEPTIONS'] = True\napi = Api(app)\ndb.init_app(app)\nmigrate = Migrate(app, db)\n\n\[email protected]_first_request\ndef create_tables():\n pass\n\n\nadd_module_gamma(api)\n",
"step-5": "from os import environ\n\nfrom flask import Flask\nfrom flask_restful import Api\nfrom flask_migrate import Migrate\n\nfrom applications.db import db\nfrom applications.gamma_api import add_module_gamma\n\napp = Flask(__name__)\napp.config[\"DEBUG\"] = True\napp.config['SQLALCHEMY_DATABASE_URI'] = environ.get('DATABASE')\napp.config[\"SQLALCHEMY_TRACK_MODIFICATIONS\"] = False\napp.config[\"PROPAGATE_EXCEPTIONS\"] = True\n\napi = Api(app)\n\ndb.init_app(app)\n\nmigrate = Migrate(app, db)\n\n\[email protected]_first_request\ndef create_tables():\n pass\n # db.create_all()\n\n\nadd_module_gamma(api)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
'''
we have source files with a certain format, each with 200 columns, and there is a process that takes the source
files, loads them into hbase, and moves them into a sql data warehouse. We have to create automated test scripts that
compare the source files with what is in hbase and the sql data warehouse: load into hbase, query the flat file,
query hbase, and compare each row.
https://community.hortonworks.com/articles/4942/import-csv-data-into-hbase-using-importtsv.html
https://www.briandunning.com/sample-data/
http://python-phoenixdb.readthedocs.io/en/latest/
https://phoenix.apache.org/faq.html
https://phoenix.apache.org/bulk_dataload.html
hbase shell
create 'CUSTOMERS', 'cf'
count 'CUSTOMERS'
scan 'CUSTOMERS'
exit
hdfs dfs -put customers-with-out-header-500.csv
hbase org.apache.hadoop.hbase.mapreduce.ImportTsv '-Dimporttsv.separator=|' -Dimporttsv.columns="HBASE_ROW_KEY,cf:first_name,cf:last_name,cf:company_name,cf:address,cf:city,cf:county,cf:state,cf:zip,cf:phone1,cf:phone2,cf:email,cf:web" CUSTOMERS customers-with-out-header-500.csv
sudo python3 -m pip install happybase
sudo python3 -m pip install pandas
sudo python3 -m pip install numpy
sudo python3 -m pip install ipython
list of hbase tables [b'customers']
len of hbase keys 501
hbase columns [b'cf:state', b'cf:phone2', b'cf:email', b'cf:zip', b'cf:last_name', b'cf:address', b'cf:city', b'cf:company_name', b'cf:phone1', b'cf:county', b'cf:first_name', b'cf:web']
hbase columns len 12
csv file shape (500, 13)
csv columns ['index', 'first_name', 'last_name', 'company_name', 'address', 'city', 'county', 'state', 'zip', 'phone1', 'phone2', 'email', 'web']
phoenix steps
python /usr/lib/phoenix/bin/sqlline.py
CREATE TABLE "CUSTOMERSPHOENIX" (pk VARCHAR PRIMARY KEY, first_name VARCHAR, last_name VARCHAR, company_name VARCHAR, address VARCHAR, city VARCHAR, county VARCHAR, state VARCHAR, zip VARCHAR, phone1 VARCHAR, phone2 VARCHAR, email VARCHAR, web VARCHAR)
python /usr/lib/phoenix/bin/psql.py -t CUSTOMERSPHOENIX -d "|" localhost customers-with-out-header-500.csv
SELECT A.*, B.* FROM CUSTOMERS AS A FULL JOIN CUSTOMERSPHOENIX AS B ON (A.PK = B.PK) WHERE A.PK IS NULL OR B.PK IS NULL
hive steps
CREATE EXTERNAL TABLE customers_hive(key string, first_name string, last_name string, company_name string, address string, city string, county string, state string, zip string, phone1 string, phone2 string, email string, web string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key, cf:first_name, cf:last_name, cf:company_name, cf:address, cf:city, cf:county, cf:state, cf:zip, cf:phone1, cf:phone2, cf:email, cf:web")
TBLPROPERTIES ("hbase.table.name"="CUSTOMERS");
SELECT yourcolumns
FROM tablenames
JOIN tablenames
WHERE condition
GROUP BY yourcolumns
HAVING aggregatecolumn condition
ORDER BY yourcolumns
'''
import pandas as pd
import happybase
import phoenixdb
from pyhive import hive
connection = happybase.Connection()
connection.open()
print('list of hbase tables {}'.format(connection.tables()))
customers = connection.table('CUSTOMERS')
keys = []
data_list = []
for key, data in customers.scan():
keys.append(key)
data_list.append(data)
hbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]
print('len of hbase keys {}'.format(len(keys)))
print('hbase columns {}'.format(hbase_columns))
print('hbase columns len {}'.format(len(hbase_columns)))
df = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col='index')
df_columns = list(df.columns)
print('csv file shape {}'.format(df.shape))
print('csv columns {}'.format(df_columns))
print('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(df_columns)))
print('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))
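
# The checks above only compare shape and column names. A minimal row-level
# comparison sketch -- assuming the HBase row key is the csv 'index' value as a
# utf-8 string and every cell was stored as a utf-8 string (both should hold
# for the ImportTsv load described in the docstring):
mismatches = []
for row_key, row_data in zip(keys, data_list):
    try:
        row_id = int(row_key.decode('utf-8'))
    except ValueError:
        mismatches.append((row_key, '<non-numeric row key>'))
        continue
    if row_id not in df.index:
        mismatches.append((row_id, '<missing from csv>'))
        continue
    row = df.loc[row_id]
    for qualifier, value in row_data.items():
        column = qualifier.decode('utf-8')[3:]  # strip the 'cf:' family prefix
        if str(row[column]) != value.decode('utf-8'):
            mismatches.append((row_id, column))
print('row-level mismatches: {}'.format(mismatches))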
url = 'http://localhost:8765/'
conn = phoenixdb.connect(url, autocommit=True)
cursor = conn.cursor()
query1 = 'DROP VIEW "CUSTOMERS"'
cursor.execute(query1)
query2 = 'CREATE VIEW "CUSTOMERS" (pk VARCHAR PRIMARY KEY, "cf"."first_name" VARCHAR, "cf"."last_name" VARCHAR, "cf"."company_name" VARCHAR, "cf"."address" VARCHAR, "cf"."city" VARCHAR, "cf"."county" VARCHAR, "cf"."state" VARCHAR, "cf"."zip" VARCHAR, "cf"."phone1" VARCHAR, "cf"."phone2" VARCHAR, "cf"."email" VARCHAR, "cf"."web" VARCHAR)'
cursor.execute(query2)
query3 = 'SELECT * FROM CUSTOMERS'
cursor.execute(query3)
data = cursor.fetchall()
print(data[:2])
from pyhive import hive # or import hive
cursor = hive.connect('localhost').cursor()
cursor.execute('SELECT * FROM customers_hive LIMIT 10')
result = cursor.fetchall()
print(len(result))
print(result)
|
normal
|
{
"blob_id": "7b38c64174656d1c4ec2b0541e6ed8d6680af7d7",
"index": 9565,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nconnection.open()\nprint('list of hbase tables {}'.format(connection.tables()))\n<mask token>\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\n<mask token>\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len {}'.format(len(hbase_columns)))\n<mask token>\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(\n df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\n<mask token>\ncursor.execute(query1)\n<mask token>\ncursor.execute(query2)\n<mask token>\ncursor.execute(query3)\n<mask token>\nprint(data[:2])\n<mask token>\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\n<mask token>\nprint(len(result))\nprint(result)\n",
"step-3": "<mask token>\nconnection = happybase.Connection()\nconnection.open()\nprint('list of hbase tables {}'.format(connection.tables()))\ncustomers = connection.table('CUSTOMERS')\nkeys = []\ndata_list = []\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\nhbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len {}'.format(len(hbase_columns)))\ndf = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col=\n 'index')\ndf_columns = list(df.columns)\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(\n df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\nurl = 'http://localhost:8765/'\nconn = phoenixdb.connect(url, autocommit=True)\ncursor = conn.cursor()\nquery1 = 'DROP VIEW \"CUSTOMERS\"'\ncursor.execute(query1)\nquery2 = (\n 'CREATE VIEW \"CUSTOMERS\" (pk VARCHAR PRIMARY KEY, \"cf\".\"first_name\" VARCHAR, \"cf\".\"last_name\" VARCHAR, \"cf\".\"company_name\" VARCHAR, \"cf\".\"address\" VARCHAR, \"cf\".\"city\" VARCHAR, \"cf\".\"county\" VARCHAR, \"cf\".\"state\" VARCHAR, \"cf\".\"zip\" VARCHAR, \"cf\".\"phone1\" VARCHAR, \"cf\".\"phone2\" VARCHAR, \"cf\".\"email\" VARCHAR, \"cf\".\"web\" VARCHAR)'\n )\ncursor.execute(query2)\nquery3 = 'SELECT * FROM CUSTOMERS'\ncursor.execute(query3)\ndata = cursor.fetchall()\nprint(data[:2])\n<mask token>\ncursor = hive.connect('localhost').cursor()\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\nresult = cursor.fetchall()\nprint(len(result))\nprint(result)\n",
"step-4": "<mask token>\nimport pandas as pd\nimport happybase\nimport phoenixdb\nfrom pyhive import hive\nconnection = happybase.Connection()\nconnection.open()\nprint('list of hbase tables {}'.format(connection.tables()))\ncustomers = connection.table('CUSTOMERS')\nkeys = []\ndata_list = []\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\nhbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len {}'.format(len(hbase_columns)))\ndf = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col=\n 'index')\ndf_columns = list(df.columns)\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(\n df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\nurl = 'http://localhost:8765/'\nconn = phoenixdb.connect(url, autocommit=True)\ncursor = conn.cursor()\nquery1 = 'DROP VIEW \"CUSTOMERS\"'\ncursor.execute(query1)\nquery2 = (\n 'CREATE VIEW \"CUSTOMERS\" (pk VARCHAR PRIMARY KEY, \"cf\".\"first_name\" VARCHAR, \"cf\".\"last_name\" VARCHAR, \"cf\".\"company_name\" VARCHAR, \"cf\".\"address\" VARCHAR, \"cf\".\"city\" VARCHAR, \"cf\".\"county\" VARCHAR, \"cf\".\"state\" VARCHAR, \"cf\".\"zip\" VARCHAR, \"cf\".\"phone1\" VARCHAR, \"cf\".\"phone2\" VARCHAR, \"cf\".\"email\" VARCHAR, \"cf\".\"web\" VARCHAR)'\n )\ncursor.execute(query2)\nquery3 = 'SELECT * FROM CUSTOMERS'\ncursor.execute(query3)\ndata = cursor.fetchall()\nprint(data[:2])\nfrom pyhive import hive\ncursor = hive.connect('localhost').cursor()\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\nresult = cursor.fetchall()\nprint(len(result))\nprint(result)\n",
"step-5": "'''\nwe have source files with a certain format and each file has 200 columns and there is a process that takes the source\nfiles and loads into hbase and moves it into sql data warehouse. We have to create automated test scripts that compares\nwith with is with hbase and sql data warehouse. load into hbase and query the flat file, query the hbase, and compare.\ncompare each row. load into hbase and query.\n\nhttps://community.hortonworks.com/articles/4942/import-csv-data-into-hbase-using-importtsv.html\nhttps://www.briandunning.com/sample-data/\nhttp://python-phoenixdb.readthedocs.io/en/latest/\nhttps://phoenix.apache.org/faq.html\nhttps://phoenix.apache.org/bulk_dataload.html\n\nhbase shell\ncreate 'CUSTOMERS', 'cf'\ncount 'CUSTOMERS'\nscan 'CUSTOMERS'\nexit\n\nhdfs dfs -put customers-with-out-header-500.csv\nhbase org.apache.hadoop.hbase.mapreduce.ImportTsv '-Dimporttsv.separator=|' -Dimporttsv.columns=\"HBASE_ROW_KEY,cf:first_name,cf:last_name,cf:company_name,cf:address,cf:city,cf:county,cf:state,cf:zip,cf:phone1,cf:phone2,cf:email,cf:web\" CUSTOMERS customers-with-out-header-500.csv\n\nsudo python3 -m pip install happybase\nsudo python3 -m pip install pandas\nsudo python3 -m pip install numpy\nsudo python3 -m pip install ipython\n\nlist of hbase tables [b'customers']\nlen of hbase keys 501\nhbase columns [b'cf:state', b'cf:phone2', b'cf:email', b'cf:zip', b'cf:last_name', b'cf:address', b'cf:city', b'cf:company_name', b'cf:phone1', b'cf:county', b'cf:first_name', b'cf:web']\nhbase columns len 12\ncsv file shape (500, 13)\ncsv columns ['index', 'first_name', 'last_name', 'company_name', 'address', 'city', 'county', 'state', 'zip', 'phone1', 'phone2', 'email', 'web']\n\nphoenix steps\npython /usr/lib/phoenix/bin/sqlline.py\nCREATE TABLE \"CUSTOMERSPHOENIX\" (pk VARCHAR PRIMARY KEY, first_name VARCHAR, last_name VARCHAR, company_name VARCHAR, address VARCHAR, city VARCHAR, county VARCHAR, state VARCHAR, zip VARCHAR, phone1 VARCHAR, phone2 VARCHAR, email VARCHAR, web VARCHAR)\npython /usr/lib/phoenix/bin/psql.py -t CUSTOMERSPHOENIX -d \"|\" localhost customers-with-out-header-500.csv\nSELECT A.*, B.* FROM CUSTOMERS AS A FULL JOIN CUSTOMERSPHOENIX AS B ON (A.PK = B.PK) WHERE A.PK IS NULL OR B.PK IS NULL\n\nhive steps\n\nCREATE EXTERNAL TABLE customers_hive(key string, first_name string, last_name string, company_name string, address string, city string, county string, state string, zip string, phone1 string, phone2 string, email string, web string)\nSTORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'\nWITH SERDEPROPERTIES (\"hbase.columns.mapping\" = \":key, cf:first_name, cf:last_name, cf:company_name, cf:address, cf:city, cf:county, cf:state, cf:zip, cf:phone1, cf:phone2, cf:email, cf:web\")\nTBLPROPERTIES (\"hbase.table.name\"=\"CUSTOMERS\");\n\nSELECT yourcolumns\nFROM tablenames\nJOIN tablenames\nWHERE condition\nGROUP BY yourcolumns\nHAVING aggregatecolumn condition\nORDER BY yourcolumns\n'''\n\nimport pandas as pd\nimport happybase\nimport phoenixdb\nfrom pyhive import hive\n\n\nconnection = happybase.Connection()\nconnection.open()\n\nprint('list of hbase tables {}'.format(connection.tables()))\n\ncustomers = connection.table('CUSTOMERS')\n\nkeys = []\ndata_list = []\n\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\n\nhbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]\n\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len 
{}'.format(len(hbase_columns)))\n\ndf = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col='index')\n\ndf_columns = list(df.columns)\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\n\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\n\n\nurl = 'http://localhost:8765/'\nconn = phoenixdb.connect(url, autocommit=True)\n\ncursor = conn.cursor()\nquery1 = 'DROP VIEW \"CUSTOMERS\"'\ncursor.execute(query1)\nquery2 = 'CREATE VIEW \"CUSTOMERS\" (pk VARCHAR PRIMARY KEY, \"cf\".\"first_name\" VARCHAR, \"cf\".\"last_name\" VARCHAR, \"cf\".\"company_name\" VARCHAR, \"cf\".\"address\" VARCHAR, \"cf\".\"city\" VARCHAR, \"cf\".\"county\" VARCHAR, \"cf\".\"state\" VARCHAR, \"cf\".\"zip\" VARCHAR, \"cf\".\"phone1\" VARCHAR, \"cf\".\"phone2\" VARCHAR, \"cf\".\"email\" VARCHAR, \"cf\".\"web\" VARCHAR)'\ncursor.execute(query2)\nquery3 = 'SELECT * FROM CUSTOMERS'\ncursor.execute(query3)\ndata = cursor.fetchall()\nprint(data[:2])\n\n\nfrom pyhive import hive # or import hive\ncursor = hive.connect('localhost').cursor()\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\nresult = cursor.fetchall()\nprint(len(result))\nprint(result)\n\n\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
##Arithmetic Progression
a = int(input ('Enter first number: '))
d = int(input('Enter common difference: '))
n = int(input('Number of term: '))
tn = a
while tn <= a + (n - 1) * d:
print(tn, end=" ")
tn += d
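
# Sanity check (sketch): the k-th term of an arithmetic progression is
# a + (k - 1) * d, so for d > 0 the loop above is equivalent to:
# for k in range(n):
#     print(a + k * d, end=" ")
# (the for-loop form also prints exactly n terms when d <= 0, where the
# while condition either never holds or never fails)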
|
normal
|
{
"blob_id": "e748261d1e5fd7921a022afefe5a5bea1fbfc67c",
"index": 9095,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile tn <= a + (n - 1) * d:\n print(tn, end=' ')\n tn += d\n",
"step-3": "a = int(input('Enter first number: '))\nd = int(input('Enter common difference: '))\nn = int(input('Number of term: '))\ntn = a\nwhile tn <= a + (n - 1) * d:\n print(tn, end=' ')\n tn += d\n",
"step-4": "##Arithmatic Progression\r\n\r\na = int(input ('Enter first number: '))\r\nd = int(input('Enter common difference: '))\r\nn = int(input('Number of term: '))\r\n\r\ntn = a\r\n\r\nwhile tn <= a + (n - 1) * d:\r\n print(tn, end=\" \")\r\n tn += d\r\n \r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from pyspark.sql import SQLContext, Row
from pyspark import SparkContext, SparkConf
from pyspark.sql.functions import col
import collections
# Create a Spark Session (the config bit is only for windows)
#conf = SparkConf().setAppName("SQL App").setMaster("local")
sc = SparkContext()
sqlCtx = SQLContext(sc)
def mapper(line):
fields = line.split(",")
return Row(ID = int(fields[0]), name = fields[1].encode("utf-8"), age = int(fields[2]), numFriends = int(fields[3]))
lines = sc.textFile("fakefriends.csv")
people = lines.map(mapper)
# Infer the schema and register the DataFrame as a table
schemaPeople = sqlCtx.createDataFrame(people).cache()
schemaPeople.registerTempTable("people")
# SQL can be run over DataFrames that have been registered as a table
teenagers = sqlCtx.sql("SELECT * FROM people WHERE age >= 13 AND age <= 19")
print(teenagers.dtypes)
for teen in teenagers.collect():
print(teen)
schemaPeople.groupBy("age").count().orderBy(col("age").desc()).show()
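
# Note: on Spark 2.x+ the SparkSession entry point supersedes SQLContext; a
# roughly equivalent sketch would be spark = SparkSession.builder.getOrCreate()
# followed by schemaPeople.createOrReplaceTempView("people") and spark.sql(...).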
|
normal
|
{
"blob_id": "e4bc2e97b70e2dc91dc86457866ec6b3531ef803",
"index": 8772,
"step-1": "<mask token>\n\n\ndef mapper(line):\n fields = line.split(',')\n return Row(ID=int(fields[0]), name=fields[1].encode('utf-8'), age=int(\n fields[2]), numFriends=int(fields[3]))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef mapper(line):\n fields = line.split(',')\n return Row(ID=int(fields[0]), name=fields[1].encode('utf-8'), age=int(\n fields[2]), numFriends=int(fields[3]))\n\n\n<mask token>\nschemaPeople.registerTempTable('people')\n<mask token>\nprint(teenagers.dtypes)\nfor teen in teenagers.collect():\n print(teen)\nschemaPeople.groupBy('age').count().orderBy(col('age').desc()).show()\n",
"step-3": "<mask token>\nsc = SparkContext()\nsqlCtx = SQLContext(sc)\n\n\ndef mapper(line):\n fields = line.split(',')\n return Row(ID=int(fields[0]), name=fields[1].encode('utf-8'), age=int(\n fields[2]), numFriends=int(fields[3]))\n\n\nlines = sc.textFile('fakefriends.csv')\npeople = lines.map(mapper)\nschemaPeople = sqlCtx.createDataFrame(people).cache()\nschemaPeople.registerTempTable('people')\nteenagers = sqlCtx.sql('SELECT * FROM people WHERE age >= 13 AND age <= 19')\nprint(teenagers.dtypes)\nfor teen in teenagers.collect():\n print(teen)\nschemaPeople.groupBy('age').count().orderBy(col('age').desc()).show()\n",
"step-4": "from pyspark.sql import SQLContext, Row\nfrom pyspark import SparkContext, SparkConf\nfrom pyspark.sql.functions import col\nimport collections\nsc = SparkContext()\nsqlCtx = SQLContext(sc)\n\n\ndef mapper(line):\n fields = line.split(',')\n return Row(ID=int(fields[0]), name=fields[1].encode('utf-8'), age=int(\n fields[2]), numFriends=int(fields[3]))\n\n\nlines = sc.textFile('fakefriends.csv')\npeople = lines.map(mapper)\nschemaPeople = sqlCtx.createDataFrame(people).cache()\nschemaPeople.registerTempTable('people')\nteenagers = sqlCtx.sql('SELECT * FROM people WHERE age >= 13 AND age <= 19')\nprint(teenagers.dtypes)\nfor teen in teenagers.collect():\n print(teen)\nschemaPeople.groupBy('age').count().orderBy(col('age').desc()).show()\n",
"step-5": "from pyspark.sql import SQLContext, Row\nfrom pyspark import SparkContext, SparkConf\nfrom pyspark.sql.functions import col\n\nimport collections\n\n# Create a Spark Session (the config bit is only for windows)\n#conf = SparkConf().setAppName(\"SQL App\").setMaster(\"local\")\nsc = SparkContext()\n\nsqlCtx = SQLContext(sc)\n\ndef mapper(line):\n\tfields = line.split(\",\")\n\treturn Row(ID = int(fields[0]), name = fields[1].encode(\"utf-8\"), age = int(fields[2]), numFriends = int(fields[3]))\n\nlines = sc.textFile(\"fakefriends.csv\")\npeople = lines.map(mapper)\n\n# Infer the schema and register the DataFrame as a table\nschemaPeople = sqlCtx.createDataFrame(people).cache()\nschemaPeople.registerTempTable(\"people\")\n\n# SQL can be run over DataFrames that have been registered as a table\nteenagers = sqlCtx.sql(\"SELECT * FROM people WHERE age >= 13 AND age <= 19\")\nprint(teenagers.dtypes)\n\nfor teen in teenagers.collect():\n\tprint(teen)\n\nschemaPeople.groupBy(\"age\").count().orderBy(col(\"age\").desc()).show()\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""CPU functionality."""
import sys
HLT = 0b00000001
LDI = 0b10000010
PRN = 0b01000111
MUL = 0b10100010
PUSH = 0b01000101
POP = 0b01000110
CMP = 0b10100111
CALL = 0b01010000
RET = 0b00010001
ADD = 0b10100000
JMP = 0b01010100
JEQ = 0b01010101
JNE = 0b01010110
AND = 0b10101000
NOT = 0b01101001
OR = 0b10101010
XOR = 0b10101011
SHL = 0b10101100
SHR = 0b10101101
MOD = 0b10100100
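# LS8 opcode layout is AABCDDDD: AA = operand count, B = 1 for ALU operations,
# C = 1 if the instruction sets the PC itself, DDDD = instruction identifier.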
class CPU:
"""Main CPU class."""
def __init__(self):
"""Construct a new CPU."""
self.reg = [0] * 8
self.pc = 0
self.ram = [0] * 256
self.running = True
self.reg[7] = 0xf4
self.sp = self.reg[7]
self.fl = 0b00000000
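        # Dispatch table: maps each opcode to its handler so run() can
        # execute any instruction with a single dictionary lookup.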
self.branchtable = {}
self.branchtable[HLT] = self.op_hlt
self.branchtable[LDI] = self.op_ldi
self.branchtable[PRN] = self.op_prn
self.branchtable[MUL] = self.op_mul
self.branchtable[PUSH] = self.op_push
self.branchtable[POP] = self.op_pop
self.branchtable[CALL] = self.op_call
self.branchtable[RET] = self.op_ret
self.branchtable[ADD] = self.op_add
self.branchtable[CMP] = self.op_cmp
self.branchtable[JMP] = self.op_jmp
self.branchtable[JEQ] = self.op_jeq
self.branchtable[JNE] = self.op_jne
self.branchtable[AND] = self.op_and
self.branchtable[NOT] = self.op_not
self.branchtable[OR] = self.op_or
self.branchtable[XOR] = self.op_xor
self.branchtable[SHL] = self.op_shl
self.branchtable[SHR] = self.op_shr
self.branchtable[MOD] = self.op_mod
def ram_read(self, MAR):
return self.ram[MAR]
def ram_write(self, MAR, MDR):
self.ram[MAR] = MDR
def op_hlt(self, operand_a, operand_b):
self.running = False
def op_ldi(self, operand_a, operand_b):
self.reg[operand_a] = operand_b
# self.pc += 3
def op_prn(self, operand_a, operand_b):
print('prn:', self.reg[operand_a])
# self.pc += 2
def op_mul(self, operand_a, operand_b):
self.alu('MUL', operand_a, operand_b)
# self.pc += 3
def op_push(self, operand_a, operand_b):
self.sp -= 1
val = self.reg[operand_a]
self.ram_write(self.sp, val)
# self.pc += 2
def op_pop(self, operand_a, operand_b):
self.reg[operand_a] = self.ram_read(self.sp)
# self.pc += 2
self.sp += 1
def op_call(self, operand_a, operand_b):
ret_addr = self.pc + 2
self.sp -= 1
self.ram_write(self.sp, ret_addr) # write sp and pc location to ram
sub_addr = self.reg[operand_a]
self.pc = sub_addr
def op_ret(self, operand_a, operand_b):
ret_addr = self.ram_read(self.sp) # set ret_addr to location in ram
self.sp += 1
self.pc = ret_addr
def op_add(self, operand_a, operand_b):
self.alu('ADD', operand_a, operand_b)
def op_cmp(self, operand_a, operand_b):
self.alu('CMP', operand_a, operand_b)
def op_jmp(self, operand_a, operand_b):
self.pc = self.reg[operand_a]
def op_jeq(self, operand_a, operand_b):
if self.fl == 0b00000001:
self.op_jmp(operand_a, operand_b)
else:
self.pc += 2
def op_jne(self, operand_a, operand_b):
if self.fl != 0b00000001:
self.op_jmp(operand_a, operand_b)
else:
self.pc += 2
def op_and(self, operand_a, operand_b):
self.alu('AND', operand_a, operand_b)
    def op_or(self, operand_a, operand_b):
        self.alu('OR', operand_a, operand_b)
    def op_xor(self, operand_a, operand_b):
        self.alu('XOR', operand_a, operand_b)
    def op_not(self, operand_a, operand_b):
        self.alu('NOT', operand_a, operand_b)
    def op_shl(self, operand_a, operand_b):
        self.alu('SHL', operand_a, operand_b)
    def op_shr(self, operand_a, operand_b):
        self.alu('SHR', operand_a, operand_b)
    def op_mod(self, operand_a, operand_b):
        self.alu('MOD', operand_a, operand_b)
def load(self, filename):
"""Load a program into memory."""
address = 0
with open(filename) as file:
for line in file:
val = line.split("#")[0].strip()
if val == '':
continue
instruction = int(val, 2)
self.ram[address] = instruction
address += 1
# For now, we've just hardcoded a program:
# program = [
# # From print8.ls8
# 0b10000010, # LDI R0,8
# 0b00000000,
# 0b00001000,
# 0b01000111, # PRN R0
# 0b00000000,
# 0b00000001, # HLT
# ]
# for instruction in program:
# self.ram[address] = instruction
# address += 1
def alu(self, op, reg_a, reg_b):
"""ALU operations."""
if op == 'ADD':
self.reg[reg_a] = self.reg[reg_a] + self.reg[reg_b]
elif op == 'MUL':
self.reg[reg_a] = self.reg[reg_a] * self.reg[reg_b]
elif op == 'CMP':
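            # FL register layout is 00000LGE (L = less-than, G = greater-than,
            # E = equal), set by the most recent CMP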
if self.reg[reg_a] < self.reg[reg_b]:
self.fl = 0b00000100
elif self.reg[reg_a] > self.reg[reg_b]:
self.fl = 0b00000010
elif self.reg[reg_a] == self.reg[reg_b]:
self.fl = 0b00000001
elif op == 'AND':
self.reg[reg_a] = self.reg[reg_a] & self.reg[reg_b]
elif op == 'OR':
self.reg[reg_a] = self.reg[reg_a] | self.reg[reg_b]
elif op == 'XOR':
self.reg[reg_a] = self.reg[reg_a] ^ self.reg[reg_b]
elif op == 'NOT':
self.reg[reg_a] = ~self.reg[reg_a]
elif op == 'SHL':
self.reg[reg_a] = self.reg[reg_a] << self.reg[reg_b]
elif op == 'SHR':
self.reg[reg_a] = self.reg[reg_a] >> self.reg[reg_b]
elif op == 'MOD':
if self.reg[reg_b] == 0:
print('ERROR: divide by 0')
                self.op_hlt(reg_a, reg_b)  # op_hlt ignores its operands
else:
remainder = self.reg[reg_a] % self.reg[reg_b]
self.reg[reg_a] = remainder
else:
raise Exception("Unsupported ALU operation")
def trace(self):
"""
Handy function to print out the CPU state. You might want to call this
from run() if you need help debugging.
"""
print(f"TRACE: %02X | %02X %02X %02X |" % (
self.pc,
# self.fl,
# self.ie,
self.ram_read(self.pc),
self.ram_read(self.pc + 1),
self.ram_read(self.pc + 2)
), end='')
for i in range(8):
print(" %02X" % self.reg[i], end='')
print()
def run(self):
"""Run the CPU."""
self.trace()
while self.running is True:
IR = self.ram_read(self.pc)
operand_a = self.ram_read(self.pc + 1)
operand_b = self.ram_read(self.pc + 2)
# This increments the pc position automatically
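            # The top two bits of IR encode the operand count; bit 4 is set
            # when the instruction writes the PC itself (CALL, RET, JMP, JEQ,
            # JNE), in which case we must not auto-advance past it.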
op_size = IR >> 6
ins_set = ((IR >> 4) & 0b1) == 1
if not ins_set:
self.pc += op_size + 1
if IR in self.branchtable:
self.branchtable[IR](operand_a, operand_b)
# SAVE WHERE WE'RE COMING FROM TO THE STACK AND SET PC TO WHERE WE'RE GOING
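# Example usage (a sketch; assumes an LS8 program file such as 'print8.ls8'
# is available in the working directory):
#
#     cpu = CPU()
#     cpu.load('print8.ls8')
#     cpu.run()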
|
normal
|
{
"blob_id": "58d144b2c6c307719cef0b5097945c8206135ccf",
"index": 6048,
"step-1": "<mask token>\n\n\nclass CPU:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def op_ldi(self, operand_a, operand_b):\n self.reg[operand_a] = operand_b\n\n def op_prn(self, operand_a, operand_b):\n print('prn:', self.reg[operand_a])\n\n def op_mul(self, operand_a, operand_b):\n self.alu('MUL', operand_a, operand_b)\n\n def op_push(self, operand_a, operand_b):\n self.sp -= 1\n val = self.reg[operand_a]\n self.ram_write(self.sp, val)\n\n def op_pop(self, operand_a, operand_b):\n self.reg[operand_a] = self.ram_read(self.sp)\n self.sp += 1\n\n def op_call(self, operand_a, operand_b):\n ret_addr = self.pc + 2\n self.sp -= 1\n self.ram_write(self.sp, ret_addr)\n sub_addr = self.reg[operand_a]\n self.pc = sub_addr\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def op_jne(self, operand_a, operand_b):\n if self.fl != 1:\n self.op_jmp(operand_a, operand_b)\n else:\n self.pc += 2\n\n def op_and(self, operand_a, operand_b):\n self.alu('AND', operand_a, operand_b)\n\n def op_or(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_xor(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def trace(self):\n \"\"\"\n Handy function to print out the CPU state. You might want to call this\n from run() if you need help debugging.\n \"\"\"\n print(f'TRACE: %02X | %02X %02X %02X |' % (self.pc, self.ram_read(\n self.pc), self.ram_read(self.pc + 1), self.ram_read(self.pc + 2\n )), end='')\n for i in range(8):\n print(' %02X' % self.reg[i], end='')\n print()\n\n def run(self):\n \"\"\"Run the CPU.\"\"\"\n self.trace()\n while self.running is True:\n IR = self.ram_read(self.pc)\n operand_a = self.ram_read(self.pc + 1)\n operand_b = self.ram_read(self.pc + 2)\n op_size = IR >> 6\n ins_set = IR >> 4 & 1 == 1\n if not ins_set:\n self.pc += op_size + 1\n if IR in self.branchtable:\n self.branchtable[IR](operand_a, operand_b)\n",
"step-2": "<mask token>\n\n\nclass CPU:\n <mask token>\n\n def __init__(self):\n \"\"\"Construct a new CPU.\"\"\"\n self.reg = [0] * 8\n self.pc = 0\n self.ram = [0] * 256\n self.running = True\n self.reg[7] = 244\n self.sp = self.reg[7]\n self.fl = 0\n self.branchtable = {}\n self.branchtable[HLT] = self.op_hlt\n self.branchtable[LDI] = self.op_ldi\n self.branchtable[PRN] = self.op_prn\n self.branchtable[MUL] = self.op_mul\n self.branchtable[PUSH] = self.op_push\n self.branchtable[POP] = self.op_pop\n self.branchtable[CALL] = self.op_call\n self.branchtable[RET] = self.op_ret\n self.branchtable[ADD] = self.op_add\n self.branchtable[CMP] = self.op_cmp\n self.branchtable[JMP] = self.op_jmp\n self.branchtable[JEQ] = self.op_jeq\n self.branchtable[JNE] = self.op_jne\n self.branchtable[AND] = self.op_and\n self.branchtable[NOT] = self.op_not\n self.branchtable[OR] = self.op_or\n self.branchtable[XOR] = self.op_xor\n self.branchtable[SHL] = self.op_shl\n self.branchtable[SHR] = self.op_shr\n self.branchtable[MOD] = self.op_mod\n <mask token>\n\n def ram_write(self, MAR, MDR):\n self.ram[MAR] = MDR\n\n def op_hlt(self, operand_a, operand_b):\n self.running = False\n\n def op_ldi(self, operand_a, operand_b):\n self.reg[operand_a] = operand_b\n\n def op_prn(self, operand_a, operand_b):\n print('prn:', self.reg[operand_a])\n\n def op_mul(self, operand_a, operand_b):\n self.alu('MUL', operand_a, operand_b)\n\n def op_push(self, operand_a, operand_b):\n self.sp -= 1\n val = self.reg[operand_a]\n self.ram_write(self.sp, val)\n\n def op_pop(self, operand_a, operand_b):\n self.reg[operand_a] = self.ram_read(self.sp)\n self.sp += 1\n\n def op_call(self, operand_a, operand_b):\n ret_addr = self.pc + 2\n self.sp -= 1\n self.ram_write(self.sp, ret_addr)\n sub_addr = self.reg[operand_a]\n self.pc = sub_addr\n\n def op_ret(self, operand_a, operand_b):\n ret_addr = self.ram_read(self.sp)\n self.sp += 1\n self.pc = ret_addr\n <mask token>\n\n def op_cmp(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n <mask token>\n\n def op_jne(self, operand_a, operand_b):\n if self.fl != 1:\n self.op_jmp(operand_a, operand_b)\n else:\n self.pc += 2\n\n def op_and(self, operand_a, operand_b):\n self.alu('AND', operand_a, operand_b)\n\n def op_or(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_xor(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n\n def op_shl(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def op_shr(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_mod(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n\n def alu(self, op, reg_a, reg_b):\n \"\"\"ALU operations.\"\"\"\n if op == 'ADD':\n self.reg[reg_a] = self.reg[reg_a] + self.reg[reg_b]\n elif op == 'MUL':\n self.reg[reg_a] = self.reg[reg_a] * self.reg[reg_b]\n elif op == 'CMP':\n if self.reg[reg_a] < self.reg[reg_b]:\n self.fl = 4\n elif self.reg[reg_a] > self.reg[reg_b]:\n self.fl = 2\n elif self.reg[reg_a] == self.reg[reg_b]:\n self.fl = 1\n elif op == 'AND':\n self.reg[reg_a] = self.reg[reg_a] & self.reg[reg_b]\n elif op == 'OR':\n self.reg[reg_a] = self.reg[reg_a] | self.reg[reg_b]\n elif op == 'XOR':\n self.reg[reg_a] = self.reg[reg_a] ^ self.reg[reg_b]\n elif op == 'NOT':\n self.reg[reg_a] = ~self.reg[reg_a]\n elif op == 'SHL':\n self.reg[reg_a] = self.reg[reg_a] << self.reg[reg_b]\n elif op == 'SHR':\n self.reg[reg_a] = self.reg[reg_a] >> self.reg[reg_b]\n elif 
op == 'MOD':\n if self.reg[reg_b] == 0:\n print('ERROR: divide by 0')\n self.op_hlt()\n else:\n remainder = self.reg[reg_a] % self.reg[reg_b]\n self.reg[reg_a] = remainder\n else:\n raise Exception('Unsupported ALU operation')\n\n def trace(self):\n \"\"\"\n Handy function to print out the CPU state. You might want to call this\n from run() if you need help debugging.\n \"\"\"\n print(f'TRACE: %02X | %02X %02X %02X |' % (self.pc, self.ram_read(\n self.pc), self.ram_read(self.pc + 1), self.ram_read(self.pc + 2\n )), end='')\n for i in range(8):\n print(' %02X' % self.reg[i], end='')\n print()\n\n def run(self):\n \"\"\"Run the CPU.\"\"\"\n self.trace()\n while self.running is True:\n IR = self.ram_read(self.pc)\n operand_a = self.ram_read(self.pc + 1)\n operand_b = self.ram_read(self.pc + 2)\n op_size = IR >> 6\n ins_set = IR >> 4 & 1 == 1\n if not ins_set:\n self.pc += op_size + 1\n if IR in self.branchtable:\n self.branchtable[IR](operand_a, operand_b)\n",
"step-3": "<mask token>\n\n\nclass CPU:\n <mask token>\n\n def __init__(self):\n \"\"\"Construct a new CPU.\"\"\"\n self.reg = [0] * 8\n self.pc = 0\n self.ram = [0] * 256\n self.running = True\n self.reg[7] = 244\n self.sp = self.reg[7]\n self.fl = 0\n self.branchtable = {}\n self.branchtable[HLT] = self.op_hlt\n self.branchtable[LDI] = self.op_ldi\n self.branchtable[PRN] = self.op_prn\n self.branchtable[MUL] = self.op_mul\n self.branchtable[PUSH] = self.op_push\n self.branchtable[POP] = self.op_pop\n self.branchtable[CALL] = self.op_call\n self.branchtable[RET] = self.op_ret\n self.branchtable[ADD] = self.op_add\n self.branchtable[CMP] = self.op_cmp\n self.branchtable[JMP] = self.op_jmp\n self.branchtable[JEQ] = self.op_jeq\n self.branchtable[JNE] = self.op_jne\n self.branchtable[AND] = self.op_and\n self.branchtable[NOT] = self.op_not\n self.branchtable[OR] = self.op_or\n self.branchtable[XOR] = self.op_xor\n self.branchtable[SHL] = self.op_shl\n self.branchtable[SHR] = self.op_shr\n self.branchtable[MOD] = self.op_mod\n <mask token>\n\n def ram_write(self, MAR, MDR):\n self.ram[MAR] = MDR\n\n def op_hlt(self, operand_a, operand_b):\n self.running = False\n\n def op_ldi(self, operand_a, operand_b):\n self.reg[operand_a] = operand_b\n\n def op_prn(self, operand_a, operand_b):\n print('prn:', self.reg[operand_a])\n\n def op_mul(self, operand_a, operand_b):\n self.alu('MUL', operand_a, operand_b)\n\n def op_push(self, operand_a, operand_b):\n self.sp -= 1\n val = self.reg[operand_a]\n self.ram_write(self.sp, val)\n\n def op_pop(self, operand_a, operand_b):\n self.reg[operand_a] = self.ram_read(self.sp)\n self.sp += 1\n\n def op_call(self, operand_a, operand_b):\n ret_addr = self.pc + 2\n self.sp -= 1\n self.ram_write(self.sp, ret_addr)\n sub_addr = self.reg[operand_a]\n self.pc = sub_addr\n\n def op_ret(self, operand_a, operand_b):\n ret_addr = self.ram_read(self.sp)\n self.sp += 1\n self.pc = ret_addr\n\n def op_add(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_cmp(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n <mask token>\n\n def op_jne(self, operand_a, operand_b):\n if self.fl != 1:\n self.op_jmp(operand_a, operand_b)\n else:\n self.pc += 2\n\n def op_and(self, operand_a, operand_b):\n self.alu('AND', operand_a, operand_b)\n\n def op_or(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_xor(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n\n def op_shl(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def op_shr(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_mod(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n\n def alu(self, op, reg_a, reg_b):\n \"\"\"ALU operations.\"\"\"\n if op == 'ADD':\n self.reg[reg_a] = self.reg[reg_a] + self.reg[reg_b]\n elif op == 'MUL':\n self.reg[reg_a] = self.reg[reg_a] * self.reg[reg_b]\n elif op == 'CMP':\n if self.reg[reg_a] < self.reg[reg_b]:\n self.fl = 4\n elif self.reg[reg_a] > self.reg[reg_b]:\n self.fl = 2\n elif self.reg[reg_a] == self.reg[reg_b]:\n self.fl = 1\n elif op == 'AND':\n self.reg[reg_a] = self.reg[reg_a] & self.reg[reg_b]\n elif op == 'OR':\n self.reg[reg_a] = self.reg[reg_a] | self.reg[reg_b]\n elif op == 'XOR':\n self.reg[reg_a] = self.reg[reg_a] ^ self.reg[reg_b]\n elif op == 'NOT':\n self.reg[reg_a] = ~self.reg[reg_a]\n elif op == 'SHL':\n self.reg[reg_a] = self.reg[reg_a] << self.reg[reg_b]\n elif op == 
'SHR':\n self.reg[reg_a] = self.reg[reg_a] >> self.reg[reg_b]\n elif op == 'MOD':\n if self.reg[reg_b] == 0:\n print('ERROR: divide by 0')\n self.op_hlt()\n else:\n remainder = self.reg[reg_a] % self.reg[reg_b]\n self.reg[reg_a] = remainder\n else:\n raise Exception('Unsupported ALU operation')\n\n def trace(self):\n \"\"\"\n Handy function to print out the CPU state. You might want to call this\n from run() if you need help debugging.\n \"\"\"\n print(f'TRACE: %02X | %02X %02X %02X |' % (self.pc, self.ram_read(\n self.pc), self.ram_read(self.pc + 1), self.ram_read(self.pc + 2\n )), end='')\n for i in range(8):\n print(' %02X' % self.reg[i], end='')\n print()\n\n def run(self):\n \"\"\"Run the CPU.\"\"\"\n self.trace()\n while self.running is True:\n IR = self.ram_read(self.pc)\n operand_a = self.ram_read(self.pc + 1)\n operand_b = self.ram_read(self.pc + 2)\n op_size = IR >> 6\n ins_set = IR >> 4 & 1 == 1\n if not ins_set:\n self.pc += op_size + 1\n if IR in self.branchtable:\n self.branchtable[IR](operand_a, operand_b)\n",
"step-4": "<mask token>\n\n\nclass CPU:\n <mask token>\n\n def __init__(self):\n \"\"\"Construct a new CPU.\"\"\"\n self.reg = [0] * 8\n self.pc = 0\n self.ram = [0] * 256\n self.running = True\n self.reg[7] = 244\n self.sp = self.reg[7]\n self.fl = 0\n self.branchtable = {}\n self.branchtable[HLT] = self.op_hlt\n self.branchtable[LDI] = self.op_ldi\n self.branchtable[PRN] = self.op_prn\n self.branchtable[MUL] = self.op_mul\n self.branchtable[PUSH] = self.op_push\n self.branchtable[POP] = self.op_pop\n self.branchtable[CALL] = self.op_call\n self.branchtable[RET] = self.op_ret\n self.branchtable[ADD] = self.op_add\n self.branchtable[CMP] = self.op_cmp\n self.branchtable[JMP] = self.op_jmp\n self.branchtable[JEQ] = self.op_jeq\n self.branchtable[JNE] = self.op_jne\n self.branchtable[AND] = self.op_and\n self.branchtable[NOT] = self.op_not\n self.branchtable[OR] = self.op_or\n self.branchtable[XOR] = self.op_xor\n self.branchtable[SHL] = self.op_shl\n self.branchtable[SHR] = self.op_shr\n self.branchtable[MOD] = self.op_mod\n\n def ram_read(self, MAR):\n return self.ram[MAR]\n\n def ram_write(self, MAR, MDR):\n self.ram[MAR] = MDR\n\n def op_hlt(self, operand_a, operand_b):\n self.running = False\n\n def op_ldi(self, operand_a, operand_b):\n self.reg[operand_a] = operand_b\n\n def op_prn(self, operand_a, operand_b):\n print('prn:', self.reg[operand_a])\n\n def op_mul(self, operand_a, operand_b):\n self.alu('MUL', operand_a, operand_b)\n\n def op_push(self, operand_a, operand_b):\n self.sp -= 1\n val = self.reg[operand_a]\n self.ram_write(self.sp, val)\n\n def op_pop(self, operand_a, operand_b):\n self.reg[operand_a] = self.ram_read(self.sp)\n self.sp += 1\n\n def op_call(self, operand_a, operand_b):\n ret_addr = self.pc + 2\n self.sp -= 1\n self.ram_write(self.sp, ret_addr)\n sub_addr = self.reg[operand_a]\n self.pc = sub_addr\n\n def op_ret(self, operand_a, operand_b):\n ret_addr = self.ram_read(self.sp)\n self.sp += 1\n self.pc = ret_addr\n\n def op_add(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_cmp(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n <mask token>\n\n def op_jeq(self, operand_a, operand_b):\n if self.fl == 1:\n self.op_jmp(operand_a, operand_b)\n else:\n self.pc += 2\n\n def op_jne(self, operand_a, operand_b):\n if self.fl != 1:\n self.op_jmp(operand_a, operand_b)\n else:\n self.pc += 2\n\n def op_and(self, operand_a, operand_b):\n self.alu('AND', operand_a, operand_b)\n\n def op_or(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_xor(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def op_not(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_shl(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def op_shr(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_mod(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def load(self, filename):\n \"\"\"Load a program into memory.\"\"\"\n address = 0\n with open(filename) as file:\n for line in file:\n val = line.split('#')[0].strip()\n if val == '':\n continue\n instruction = int(val, 2)\n self.ram[address] = instruction\n address += 1\n\n def alu(self, op, reg_a, reg_b):\n \"\"\"ALU operations.\"\"\"\n if op == 'ADD':\n self.reg[reg_a] = self.reg[reg_a] + self.reg[reg_b]\n elif op == 'MUL':\n self.reg[reg_a] = self.reg[reg_a] * self.reg[reg_b]\n elif op == 'CMP':\n if self.reg[reg_a] < self.reg[reg_b]:\n self.fl = 4\n 
elif self.reg[reg_a] > self.reg[reg_b]:\n self.fl = 2\n elif self.reg[reg_a] == self.reg[reg_b]:\n self.fl = 1\n elif op == 'AND':\n self.reg[reg_a] = self.reg[reg_a] & self.reg[reg_b]\n elif op == 'OR':\n self.reg[reg_a] = self.reg[reg_a] | self.reg[reg_b]\n elif op == 'XOR':\n self.reg[reg_a] = self.reg[reg_a] ^ self.reg[reg_b]\n elif op == 'NOT':\n self.reg[reg_a] = ~self.reg[reg_a]\n elif op == 'SHL':\n self.reg[reg_a] = self.reg[reg_a] << self.reg[reg_b]\n elif op == 'SHR':\n self.reg[reg_a] = self.reg[reg_a] >> self.reg[reg_b]\n elif op == 'MOD':\n if self.reg[reg_b] == 0:\n print('ERROR: divide by 0')\n self.op_hlt()\n else:\n remainder = self.reg[reg_a] % self.reg[reg_b]\n self.reg[reg_a] = remainder\n else:\n raise Exception('Unsupported ALU operation')\n\n def trace(self):\n \"\"\"\n Handy function to print out the CPU state. You might want to call this\n from run() if you need help debugging.\n \"\"\"\n print(f'TRACE: %02X | %02X %02X %02X |' % (self.pc, self.ram_read(\n self.pc), self.ram_read(self.pc + 1), self.ram_read(self.pc + 2\n )), end='')\n for i in range(8):\n print(' %02X' % self.reg[i], end='')\n print()\n\n def run(self):\n \"\"\"Run the CPU.\"\"\"\n self.trace()\n while self.running is True:\n IR = self.ram_read(self.pc)\n operand_a = self.ram_read(self.pc + 1)\n operand_b = self.ram_read(self.pc + 2)\n op_size = IR >> 6\n ins_set = IR >> 4 & 1 == 1\n if not ins_set:\n self.pc += op_size + 1\n if IR in self.branchtable:\n self.branchtable[IR](operand_a, operand_b)\n",
"step-5": "\"\"\"CPU functionality.\"\"\"\n\nimport sys\nHLT = 0b00000001\nLDI = 0b10000010\nPRN = 0b01000111\nMUL = 0b10100010\nPUSH = 0b01000101\nPOP = 0b01000110\nCMP = 0b10100111\nCALL = 0b01010000\nRET = 0b00010001\nADD = 0b10100000\nCMP = 0b10100111\nJMP = 0b01010100\nJEQ = 0b01010101\nJNE = 0b01010110\nAND = 0b10101000\nNOT = 0b01101001\nOR = 0b10101010\nXOR = 0b10101011\nSHL = 0b10101100\nSHR = 0b10101101\nMOD = 0b10100100\n\n\nclass CPU:\n \"\"\"Main CPU class.\"\"\"\n\n def __init__(self):\n \"\"\"Construct a new CPU.\"\"\"\n self.reg = [0] * 8\n self.pc = 0\n self.ram = [0] * 256\n self.running = True\n self.reg[7] = 0xf4\n self.sp = self.reg[7]\n self.fl = 0b00000000\n self.branchtable = {}\n self.branchtable[HLT] = self.op_hlt\n self.branchtable[LDI] = self.op_ldi\n self.branchtable[PRN] = self.op_prn\n self.branchtable[MUL] = self.op_mul\n self.branchtable[PUSH] = self.op_push\n self.branchtable[POP] = self.op_pop\n self.branchtable[CALL] = self.op_call\n self.branchtable[RET] = self.op_ret\n self.branchtable[ADD] = self.op_add\n self.branchtable[CMP] = self.op_cmp\n self.branchtable[JMP] = self.op_jmp\n self.branchtable[JEQ] = self.op_jeq\n self.branchtable[JNE] = self.op_jne\n self.branchtable[AND] = self.op_and\n self.branchtable[NOT] = self.op_not\n self.branchtable[OR] = self.op_or\n self.branchtable[XOR] = self.op_xor\n self.branchtable[SHL] = self.op_shl\n self.branchtable[SHR] = self.op_shr\n self.branchtable[MOD] = self.op_mod\n\n def ram_read(self, MAR):\n return self.ram[MAR]\n\n def ram_write(self, MAR, MDR):\n self.ram[MAR] = MDR\n\n def op_hlt(self, operand_a, operand_b):\n self.running = False\n\n def op_ldi(self, operand_a, operand_b):\n self.reg[operand_a] = operand_b\n # self.pc += 3\n\n def op_prn(self, operand_a, operand_b):\n print('prn:', self.reg[operand_a])\n # self.pc += 2\n\n def op_mul(self, operand_a, operand_b):\n self.alu('MUL', operand_a, operand_b)\n # self.pc += 3\n\n def op_push(self, operand_a, operand_b):\n self.sp -= 1\n val = self.reg[operand_a]\n self.ram_write(self.sp, val)\n # self.pc += 2\n\n def op_pop(self, operand_a, operand_b):\n self.reg[operand_a] = self.ram_read(self.sp)\n # self.pc += 2\n self.sp += 1\n\n def op_call(self, operand_a, operand_b):\n ret_addr = self.pc + 2\n self.sp -= 1\n self.ram_write(self.sp, ret_addr) # write sp and pc location to ram\n sub_addr = self.reg[operand_a]\n self.pc = sub_addr\n\n def op_ret(self, operand_a, operand_b):\n ret_addr = self.ram_read(self.sp) # set ret_addr to location in ram\n self.sp += 1\n self.pc = ret_addr\n\n def op_add(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_cmp(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def op_jmp(self, operand_a, operand_b):\n self.pc = self.reg[operand_a]\n\n def op_jeq(self, operand_a, operand_b):\n if self.fl == 0b00000001:\n self.op_jmp(operand_a, operand_b)\n else:\n self.pc += 2\n\n def op_jne(self, operand_a, operand_b):\n if self.fl != 0b00000001:\n self.op_jmp(operand_a, operand_b)\n else:\n self.pc += 2\n\n def op_and(self, operand_a, operand_b):\n self.alu('AND', operand_a, operand_b)\n\n def op_or(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_xor(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def op_not(self, operand_a, operand_b):\n self.alu('ADD', operand_a, operand_b)\n\n def op_shl(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def op_shr(self, operand_a, operand_b):\n self.alu('ADD', 
operand_a, operand_b)\n\n def op_mod(self, operand_a, operand_b):\n self.alu('CMP', operand_a, operand_b)\n\n def load(self, filename):\n \"\"\"Load a program into memory.\"\"\"\n\n address = 0\n\n with open(filename) as file:\n for line in file:\n val = line.split(\"#\")[0].strip()\n if val == '':\n continue\n instruction = int(val, 2)\n self.ram[address] = instruction\n address += 1\n\n # For now, we've just hardcoded a program:\n # program = [\n # # From print8.ls8\n # 0b10000010, # LDI R0,8\n # 0b00000000,\n # 0b00001000,\n # 0b01000111, # PRN R0\n # 0b00000000,\n # 0b00000001, # HLT\n # ]\n # for instruction in program:\n # self.ram[address] = instruction\n # address += 1\n\n def alu(self, op, reg_a, reg_b):\n \"\"\"ALU operations.\"\"\"\n\n if op == 'ADD':\n self.reg[reg_a] = self.reg[reg_a] + self.reg[reg_b]\n elif op == 'MUL':\n self.reg[reg_a] = self.reg[reg_a] * self.reg[reg_b]\n elif op == 'CMP':\n if self.reg[reg_a] < self.reg[reg_b]:\n self.fl = 0b00000100\n elif self.reg[reg_a] > self.reg[reg_b]:\n self.fl = 0b00000010\n elif self.reg[reg_a] == self.reg[reg_b]:\n self.fl = 0b00000001\n elif op == 'AND':\n self.reg[reg_a] = self.reg[reg_a] & self.reg[reg_b]\n elif op == 'OR':\n self.reg[reg_a] = self.reg[reg_a] | self.reg[reg_b]\n elif op == 'XOR':\n self.reg[reg_a] = self.reg[reg_a] ^ self.reg[reg_b]\n elif op == 'NOT':\n self.reg[reg_a] = ~self.reg[reg_a]\n elif op == 'SHL':\n self.reg[reg_a] = self.reg[reg_a] << self.reg[reg_b]\n elif op == 'SHR':\n self.reg[reg_a] = self.reg[reg_a] >> self.reg[reg_b]\n elif op == 'MOD':\n if self.reg[reg_b] == 0:\n print('ERROR: divide by 0')\n self.op_hlt()\n else:\n remainder = self.reg[reg_a] % self.reg[reg_b]\n self.reg[reg_a] = remainder\n\n else:\n raise Exception(\"Unsupported ALU operation\")\n\n def trace(self):\n \"\"\"\n Handy function to print out the CPU state. You might want to call this\n from run() if you need help debugging.\n \"\"\"\n\n print(f\"TRACE: %02X | %02X %02X %02X |\" % (\n self.pc,\n # self.fl,\n # self.ie,\n self.ram_read(self.pc),\n self.ram_read(self.pc + 1),\n self.ram_read(self.pc + 2)\n ), end='')\n\n for i in range(8):\n print(\" %02X\" % self.reg[i], end='')\n\n print()\n\n def run(self):\n \"\"\"Run the CPU.\"\"\"\n self.trace()\n\n while self.running is True:\n IR = self.ram_read(self.pc)\n operand_a = self.ram_read(self.pc + 1)\n operand_b = self.ram_read(self.pc + 2)\n\n # This increments the pc position automatically\n op_size = IR >> 6\n ins_set = ((IR >> 4) & 0b1) == 1\n if not ins_set:\n self.pc += op_size + 1\n\n if IR in self.branchtable:\n self.branchtable[IR](operand_a, operand_b)\n\n# SAVE WHERE WE'RE COMING FROM TO THE STACK AND SET PC TO WHERE WE'RE GOING\n",
"step-ids": [
13,
22,
23,
27,
32
]
}
|
[
13,
22,
23,
27,
32
] |
# -*- coding: utf-8 -*-
'''
Task 12.3

Create a function print_ip_table that displays a table of reachable and
unreachable IP addresses.

The function expects two lists as arguments:
* a list of reachable IP addresses
* a list of unreachable IP addresses

The function prints a table of the following form to standard output:

Reachable    Unreachable
-----------  -------------
10.1.1.1     10.1.1.7
10.1.1.2     10.1.1.8
             10.1.1.9

The function must not modify the lists passed to it as arguments.
That is, the lists must look the same before and after the function runs.

There are no tests for this task
'''
import subprocess
import ipaddress
from tabulate import tabulate
def ping_ip_addresses(ip_addresses):
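    # Ping each address with three packets and sort it into the reachable or
    # unreachable list based on the process exit code.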
result1 = []
result2 = []
for ip_address in ip_addresses:
reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='utf-8')
if reply.returncode == 0:
result1.append(ip_address)
else:
result2.append(ip_address)
    result = (result1, result2)
return result
def convert_ranges_to_ip_list(list_of_ip_addresses):
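    # Expands range items such as '10.1.1.1-10.1.1.3' (or '10.1.1.1-3') into
    # individual addresses; ranges are assumed to vary only in the last octet.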
result=[]
for item in list_of_ip_addresses:
source = item.split('-')
if len(source) == 1:
result.append(source[0])
else:
k = 0
source2 = source[0].split('.')
m = int(source2[3])
if len(source[1]) == 1:
k = int(source[1])
else:
source1 = source[1].split('.')
k = int(source1[3])
ip1 = ipaddress.ip_address(source[0])
for i in range(m, k+1):
result.append(str(ip1))
ip1 += 1
return result
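# A minimal sketch of the print_ip_table function that the task description
# above calls for (builds the table without modifying the input lists):
def print_ip_table(reachable, unreachable):
    print(tabulate({'Reachable': reachable, 'Unreachable': unreachable},
                   headers='keys'))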
sh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4', '172.19.30.1-172.19.30.254']))
# sh_ip is a (reachable, unreachable) tuple; unpack it so each list becomes a
# column of the printed table rather than a row
print_ip_table(*sh_ip)
|
normal
|
{
"blob_id": "dd7e8556405f07172ce2b1e9f486c2cd2f4bad58",
"index": 7613,
"step-1": "<mask token>\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\n<mask token>\nprint(tabulate(sh_ip, headers=columns))\n",
"step-3": "<mask token>\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\ncolumns = ['Reachable', 'Unreachable']\nsh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4',\n '172.19.30.1-172.19.30.254']))\nprint(tabulate(sh_ip, headers=columns))\n",
"step-4": "<mask token>\nimport subprocess\nimport ipaddress\nfrom tabulate import tabulate\n\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1, result2])\n return result\n\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result = []\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k + 1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\n\ncolumns = ['Reachable', 'Unreachable']\nsh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4',\n '172.19.30.1-172.19.30.254']))\nprint(tabulate(sh_ip, headers=columns))\n",
"step-5": "# -*- coding: utf-8 -*-\n'''\nЗадание 12.3\n\n\nСоздать функцию print_ip_table, которая отображает таблицу доступных и недоступных IP-адресов.\n\nФункция ожидает как аргументы два списка:\n* список доступных IP-адресов\n* список недоступных IP-адресов\n\nРезультат работы функции - вывод на стандартный поток вывода таблицы вида:\n\nReachable Unreachable\n----------- -------------\n10.1.1.1 10.1.1.7\n10.1.1.2 10.1.1.8\n 10.1.1.9\n\nФункция не должна изменять списки, которые переданы ей как аргументы.\nТо есть, до выполнения функции и после списки должны выглядеть одинаково.\n\n\nДля этого задания нет тестов\n'''\nimport subprocess\nimport ipaddress\nfrom tabulate import tabulate\n\ndef ping_ip_addresses(ip_addresses):\n result1 = []\n result2 = []\n for ip_address in ip_addresses:\n reply = subprocess.run(['ping', '-c', '3', '-n', ip_address],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n encoding='utf-8')\n if reply.returncode == 0:\n result1.append(ip_address)\n else:\n result2.append(ip_address)\n result = tuple([result1,result2])\n return result\n\ndef convert_ranges_to_ip_list(list_of_ip_addresses):\n result=[]\n for item in list_of_ip_addresses:\n source = item.split('-')\n if len(source) == 1:\n result.append(source[0])\n else:\n k = 0\n source2 = source[0].split('.')\n m = int(source2[3])\n if len(source[1]) == 1:\n k = int(source[1])\n else:\n source1 = source[1].split('.')\n k = int(source1[3])\n ip1 = ipaddress.ip_address(source[0])\n for i in range(m, k+1):\n result.append(str(ip1))\n ip1 += 1\n return result\n\ncolumns = ['Reachable', 'Unreachable']\nsh_ip = ping_ip_addresses(convert_ranges_to_ip_list(['8.8.4.4', '172.19.30.1-172.19.30.254']))\nprint(tabulate(sh_ip, headers=columns))\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
#
# PySNMP MIB module Nortel-MsCarrier-MscPassport-AtmEbrMIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-AtmEbrMIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:19:41 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
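# Note: like other pysmi-generated MIB modules, this file is executed by
# pysnmp's MibBuilder, which supplies the 'mibBuilder' object referenced
# below; it is not intended to be imported as a standalone Python module.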
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
mscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex, mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex", "mscAtmIfVptIndex", "mscAtmIfVcc", "mscAtmIfVptVccIndex", "mscAtmIfVpc", "mscAtmIfVptVcc", "mscAtmIfVccIndex", "mscAtmIfVpcIndex")
mscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIisp", "mscAtmIfVptIisp", "mscAtmIfVptIispIndex", "mscAtmIfIispIndex")
mscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex, mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrc", "mscAtmIfVptVccSrcIndex", "mscAtmIfVccSrcIndex", "mscAtmIfVptVccSrc", "mscAtmIfVpcSrcIndex", "mscAtmIfVccSrc")
mscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex", "mscAtmIfPnniIndex", "mscAtmIfPnni", "mscAtmIfVptPnni")
mscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUni", "mscAtmIfUni", "mscAtmIfUniIndex", "mscAtmIfVptUniIndex")
Counter32, DisplayString, Gauge32, StorageType, RowStatus = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB", "Counter32", "DisplayString", "Gauge32", "StorageType", "RowStatus")
NonReplicated, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-TextualConventionsMIB", "NonReplicated")
mscPassportMIBs, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB", "mscPassportMIBs")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress, Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "ObjectIdentity", "ModuleIdentity", "Bits", "Counter32", "IpAddress", "Gauge32", "NotificationType", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Unsigned32", "Counter64", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
atmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))
mscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2))
mscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11))
mscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')
mscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2))
mscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12))
mscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoIndex.setStatus('mandatory')
mscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
mscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))
mscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatus.setStatus('mandatory')
mscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrComponentName.setStatus('mandatory')
mscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrStorageType.setStatus('mandatory')
mscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfUniEbrIndex.setStatus('mandatory')
mscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfUniEbrProvTable.setStatus('mandatory')
mscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrProvEntry.setStatus('mandatory')
mscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrPathOptimization.setStatus('mandatory')
mscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfUniEbrOperTable.setStatus('mandatory')
mscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrOperEntry.setStatus('mandatory')
mscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsTable.setStatus('mandatory')
mscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')
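# AtmIf Iisp Ebr component: same RowStatus/Prov/Oper/Stats table layout as the Uni variant, indexed by AtmIf/Iisp/Ebr.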
mscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))
mscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatus.setStatus('mandatory')
mscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrComponentName.setStatus('mandatory')
mscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrStorageType.setStatus('mandatory')
mscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfIispEbrIndex.setStatus('mandatory')
mscAtmIfIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfIispEbrProvTable.setStatus('mandatory')
mscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrProvEntry.setStatus('mandatory')
mscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrPathOptimization.setStatus('mandatory')
mscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfIispEbrOperTable.setStatus('mandatory')
mscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrOperEntry.setStatus('mandatory')
mscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfIispEbrStatsTable.setStatus('mandatory')
mscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')
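# AtmIf Vpt Iisp Ebr component: as above, with the Vpt index added to each table's index chain.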
mscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7))
mscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')
mscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrComponentName.setStatus('mandatory')
mscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStorageType.setStatus('mandatory')
mscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptIispEbrIndex.setStatus('mandatory')
mscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrProvTable.setStatus('mandatory')
mscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')
mscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrOperTable.setStatus('mandatory')
mscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')
mscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')
mscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')
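# AtmIf Vpt Pnni Ebr component.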
mscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7))
mscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')
mscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')
mscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrIndex.setStatus('mandatory')
mscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')
mscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')
mscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')
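# AtmIf Vpt Uni Ebr component.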
mscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7))
mscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrComponentName.setStatus('mandatory')
mscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStorageType.setStatus('mandatory')
mscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptUniEbrIndex.setStatus('mandatory')
mscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrProvTable.setStatus('mandatory')
mscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrOperTable.setStatus('mandatory')
mscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')
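# AtmIf Vpt Vcc Src EbrOv component: per-connection overrides for recovery and path-optimization subscription (no/yes, default yes).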
mscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2))
mscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVptVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVptVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
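# AtmIf Vpt Vcc EbrInfo component: read-only operational state (subscriptions, recovered flag) and recovery/optimization counters.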
mscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12))
mscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
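# AtmIf Pnni Ebr component.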
mscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))
mscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrComponentName.setStatus('mandatory')
mscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrStorageType.setStatus('mandatory')
mscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfPnniEbrIndex.setStatus('mandatory')
mscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrProvTable.setStatus('mandatory')
mscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')
mscAtmIfPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrOperTable.setStatus('mandatory')
mscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')
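# Group and capabilities OID anchors for the AtmEbr MIB (CA, CA02, CA02A).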
atmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))
atmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1))
atmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3))
atmEbrGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3, 2))
atmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))
atmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1))
atmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3))
atmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3, 2))
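# Export every object defined by this module under the module name used for import resolution.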
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-AtmEbrMIB", mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA, mscAtmIfUniEbrTotalConnectionRecoveries=mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=mscAtmIfVptPnniEbrProvEntry, mscAtmIfVptVccEbrInfoTotalPathOptimizations=mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup, mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery, mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry, mscAtmIfVptUniEbrTotalPathOptimizations=mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr, mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization, mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry, mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType, mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus, mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable, mscAtmIfVptPnniEbrSubscribedConnections=mscAtmIfVptPnniEbrSubscribedConnections, mscAtmIfVccEbrInfoTotalPathOptimizations=mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=mscAtmIfVptUniEbrProvEntry, mscAtmIfVptPnniEbrEligibleRecoveredConnections=mscAtmIfVptPnniEbrEligibleRecoveredConnections, mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName, mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry, mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex, mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType, mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable, mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization, mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry, mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry, mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType, mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry, mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex, mscAtmIfPnniEbrTotalConnectionRecoveries=mscAtmIfPnniEbrTotalConnectionRecoveries, mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable, mscAtmIfPnniEbrEligibleRecoveredConnections=mscAtmIfPnniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoRecoverySubscribed=mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=mscAtmIfVptVccSrcEbrOvProvTable, mscAtmIfVptVccEbrInfoConnectionRecovered=mscAtmIfVptVccEbrInfoConnectionRecovered, mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName, mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName, mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry, mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName, mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry, mscAtmIfVptIispEbrTotalPathOptimizations=mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex, mscAtmIfVptUniEbrIneligibleRecoveredConnections=mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02=atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=mscAtmIfVpcSrcEbrOvOptimizationSubscribed, 
mscAtmIfIispEbrTotalPathOptimizations=mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName=mscAtmIfVccSrcEbrOvComponentName, mscAtmIfVccSrcEbrOvOptimizationSubscribed=mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=mscAtmIfIispEbrRowStatusEntry, mscAtmIfVptIispEbrIneligibleRecoveredConnections=mscAtmIfVptIispEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A, mscAtmIfVptVccEbrInfoOptimizationSubscribed=mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=mscAtmIfPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable, mscAtmIfVptPnniEbrIneligibleRecoveredConnections=mscAtmIfVptPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcEbrInfoConnectionRecovered=mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations=mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex, mscAtmIfVptUniEbrTotalConnectionRecoveries=mscAtmIfVptUniEbrTotalConnectionRecoveries, mscAtmIfVpcEbrInfoTotalConnectionRecoveries=mscAtmIfVpcEbrInfoTotalConnectionRecoveries, mscAtmIfVptVccSrcEbrOvRowStatusEntry=mscAtmIfVptVccSrcEbrOvRowStatusEntry, mscAtmIfIispEbrTotalConnectionRecoveries=mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=mscAtmIfVptPnniEbrOperEntry, 
mscAtmIfIispEbrSubscribedConnections=mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry=mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr, mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry, mscAtmIfVptPnniEbrTotalConnectionRecoveries=mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=mscAtmIfVptVccSrcEbrOvComponentName, mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery, mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType, mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv, mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable, mscAtmIfUniEbrEligibleRecoveredConnections=mscAtmIfUniEbrEligibleRecoveredConnections, mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry, mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus, mscAtmIfIispEbrEligibleRecoveredConnections=mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=mscAtmIfPnniEbrConnectionRecovery, mscAtmIfVccEbrInfoConnectionRecovered=mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=mscAtmIfVccEbrInfoStatsEntry, mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=mscAtmIfVptVccEbrInfoTotalConnectionRecoveries, mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType, mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry, mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable, mscAtmIfVccSrcEbrOvRecoverySubscribed=mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=atmEbrCapabilities, mscAtmIfUniEbrComponentName=mscAtmIfUniEbrComponentName, 
mscAtmIfPnniEbrTotalPathOptimizations=mscAtmIfPnniEbrTotalPathOptimizations, mscAtmIfUniEbrIneligibleRecoveredConnections=mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A, mscAtmIfVccEbrInfoOptimizationSubscribed=mscAtmIfVccEbrInfoOptimizationSubscribed, mscAtmIfVptVccSrcEbrOvRowStatusTable=mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB, mscAtmIfVptVccEbrInfoRecoverySubscribed=mscAtmIfVptVccEbrInfoRecoverySubscribed, mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry, mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus, mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry, mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType, mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry, mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable, mscAtmIfIispEbrIneligibleRecoveredConnections=mscAtmIfIispEbrIneligibleRecoveredConnections, mscAtmIfVptIispEbrConnectionRecovery=mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02, mscAtmIfVptIispEbrTotalConnectionRecoveries=mscAtmIfVptIispEbrTotalConnectionRecoveries, mscAtmIfUniEbrTotalPathOptimizations=mscAtmIfUniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvRecoverySubscribed=mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=mscAtmIfVptPnniEbrOperTable, mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=mscAtmIfVptVccSrcEbrOvOptimizationSubscribed, mscAtmIfVptUniEbrEligibleRecoveredConnections=mscAtmIfVptUniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoOptimizationSubscribed=mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=mscAtmIfVptVccSrcEbrOvStorageType, mscAtmIfVptPnniEbrTotalPathOptimizations=mscAtmIfVptPnniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName, mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName, mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry, mscAtmIfVptVccSrcEbrOvRecoverySubscribed=mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA, mscAtmIfVccEbrInfoTotalConnectionRecoveries=mscAtmIfVccEbrInfoTotalConnectionRecoveries)
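# --------------------------------------------------------------------------
# Hedged usage sketch (not part of the generated module). It shows one way a
# separate script might load this module and resolve an exported symbol with
# pysnmp's MIB builder. The module and symbol names come from the
# exportSymbols() call above; the './mibs' search path is an assumption.
#
#     from pysnmp.smi import builder
#
#     mib_builder = builder.MibBuilder()
#     # Assumes this file is saved as
#     # ./mibs/Nortel-MsCarrier-MscPassport-AtmEbrMIB.py
#     mib_builder.addMibSources(builder.DirMibSource('./mibs'))
#     mib_builder.loadModules('Nortel-MsCarrier-MscPassport-AtmEbrMIB')
#
#     (row_status_col,) = mib_builder.importSymbols(
#         'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrRowStatus')
#     # Prints the column's OID:
#     # (1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1)
#     print(row_status_col.getName())
# --------------------------------------------------------------------------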
mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatus.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrComponentName.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStorageType.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIndex.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrOperTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n 
mscAtmIfPnniEbrOperEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsTable.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\n<mask token>\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')\n<mask token>\nmibBuilder.exportSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',\n mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA,\n mscAtmIfUniEbrTotalConnectionRecoveries=\n mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=\n mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=\n mscAtmIfVptPnniEbrProvEntry,\n mscAtmIfVptVccEbrInfoTotalPathOptimizations=\n mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=\n mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=\n mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup,\n mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery,\n mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry,\n mscAtmIfVptUniEbrTotalPathOptimizations=\n mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=\n mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr,\n mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization,\n mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry,\n mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType,\n mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus,\n mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable,\n mscAtmIfVptPnniEbrSubscribedConnections=\n mscAtmIfVptPnniEbrSubscribedConnections,\n mscAtmIfVccEbrInfoTotalPathOptimizations=\n mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=\n mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=\n mscAtmIfVptUniEbrProvEntry,\n mscAtmIfVptPnniEbrEligibleRecoveredConnections=\n mscAtmIfVptPnniEbrEligibleRecoveredConnections,\n mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName,\n mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry,\n mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex,\n mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType,\n mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable,\n mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization,\n mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry,\n mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry,\n mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType,\n mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry,\n mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex,\n mscAtmIfPnniEbrTotalConnectionRecoveries=\n mscAtmIfPnniEbrTotalConnectionRecoveries,\n mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable,\n mscAtmIfPnniEbrEligibleRecoveredConnections=\n mscAtmIfPnniEbrEligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoRecoverySubscribed=\n mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=\n mscAtmIfVptVccSrcEbrOvProvTable,\n mscAtmIfVptVccEbrInfoConnectionRecovered=\n mscAtmIfVptVccEbrInfoConnectionRecovered,\n 
mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName,\n mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName,\n mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry,\n mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName,\n mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry,\n mscAtmIfVptIispEbrTotalPathOptimizations=\n mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=\n mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex,\n mscAtmIfVptUniEbrIneligibleRecoveredConnections=\n mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02\n =atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=\n mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=\n mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=\n mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=\n mscAtmIfVpcSrcEbrOvOptimizationSubscribed,\n mscAtmIfIispEbrTotalPathOptimizations=\n mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName\n =mscAtmIfVccSrcEbrOvComponentName,\n mscAtmIfVccSrcEbrOvOptimizationSubscribed=\n mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=\n mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=\n mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=\n mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=\n mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=\n mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=\n mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=\n mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=\n mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=\n mscAtmIfIispEbrRowStatusEntry,\n mscAtmIfVptIispEbrIneligibleRecoveredConnections=\n mscAtmIfVptIispEbrIneligibleRecoveredConnections,\n atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A,\n mscAtmIfVptVccEbrInfoOptimizationSubscribed=\n mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=\n mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=\n mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=\n mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=\n mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=\n mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=\n mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=\n mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=\n mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=\n mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=\n mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=\n mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=\n mscAtmIfPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable,\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections=\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoConnectionRecovered=\n mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=\n mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=\n mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=\n mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations\n =mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=\n mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=\n mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=\n mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=\n mscAtmIfIispEbrConnectionRecovery, 
mscAtmIfVccSrcEbrOvProvEntry=\n mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex,\n mscAtmIfVptUniEbrTotalConnectionRecoveries=\n mscAtmIfVptUniEbrTotalConnectionRecoveries,\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries=\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries,\n mscAtmIfVptVccSrcEbrOvRowStatusEntry=\n mscAtmIfVptVccSrcEbrOvRowStatusEntry,\n mscAtmIfIispEbrTotalConnectionRecoveries=\n mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=\n mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=\n mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=\n mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=\n mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=\n mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=\n mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=\n mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=\n mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=\n mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=\n mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=\n mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=\n mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=\n mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=\n mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry\n =mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=\n mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=\n mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=\n mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=\n mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=\n mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=\n mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=\n mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr,\n mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry,\n mscAtmIfVptPnniEbrTotalConnectionRecoveries=\n mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=\n mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=\n mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=\n mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=\n mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=\n mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=\n mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=\n mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=\n mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=\n mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=\n mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=\n mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=\n mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=\n mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=\n mscAtmIfVptVccSrcEbrOvComponentName,\n mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery,\n mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType,\n mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv,\n mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable,\n mscAtmIfUniEbrEligibleRecoveredConnections=\n mscAtmIfUniEbrEligibleRecoveredConnections,\n mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry,\n mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus,\n mscAtmIfIispEbrEligibleRecoveredConnections=\n mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=\n 
mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=\n mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=\n mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=\n mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=\n mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=\n mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=\n mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=\n mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=\n mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=\n mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=\n mscAtmIfPnniEbrConnectionRecovery,\n mscAtmIfVccEbrInfoConnectionRecovered=\n mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=\n mscAtmIfVccEbrInfoStatsEntry,\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries,\n mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType,\n mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry,\n mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable,\n mscAtmIfVccSrcEbrOvRecoverySubscribed=\n mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=\n atmEbrCapabilities, mscAtmIfUniEbrComponentName=\n mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=\n mscAtmIfPnniEbrTotalPathOptimizations,\n mscAtmIfUniEbrIneligibleRecoveredConnections=\n mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=\n mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=\n mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=\n mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=\n mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=\n mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=\n mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=\n mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=\n mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A,\n mscAtmIfVccEbrInfoOptimizationSubscribed=\n mscAtmIfVccEbrInfoOptimizationSubscribed,\n mscAtmIfVptVccSrcEbrOvRowStatusTable=\n mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB,\n mscAtmIfVptVccEbrInfoRecoverySubscribed=\n mscAtmIfVptVccEbrInfoRecoverySubscribed,\n mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry,\n mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus,\n mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry,\n mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType,\n mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry,\n mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable,\n mscAtmIfIispEbrIneligibleRecoveredConnections=\n mscAtmIfIispEbrIneligibleRecoveredConnections,\n mscAtmIfVptIispEbrConnectionRecovery=\n mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=\n mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02,\n mscAtmIfVptIispEbrTotalConnectionRecoveries=\n mscAtmIfVptIispEbrTotalConnectionRecoveries,\n mscAtmIfUniEbrTotalPathOptimizations=\n mscAtmIfUniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvRecoverySubscribed=\n mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=\n mscAtmIfVptPnniEbrOperTable,\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed,\n mscAtmIfVptUniEbrEligibleRecoveredConnections=\n mscAtmIfVptUniEbrEligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoOptimizationSubscribed=\n mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=\n mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=\n mscAtmIfUniEbrRowStatus, 
mscAtmIfUniEbrOperEntry=\n mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=\n mscAtmIfVptVccSrcEbrOvStorageType,\n mscAtmIfVptPnniEbrTotalPathOptimizations=\n mscAtmIfVptPnniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName,\n mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName,\n mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry,\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed=\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=\n mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA,\n mscAtmIfVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVccEbrInfoTotalConnectionRecoveries)\n",
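The step-2 snapshot above keeps only the "if mibBuilder.loadTexts:" guards and the closing mibBuilder.exportSymbols(...) call: the guards are how pysmi-generated modules attach status/description text only when the loader asks for it, and exportSymbols is what makes every MIB object importable by name from 'Nortel-MsCarrier-MscPassport-AtmEbrMIB'. Below is a minimal sketch of how such a generated module is consumed with pysnmp's MibBuilder; the ./mibs path is an illustrative assumption, and the dependent Nortel modules named in the step-3 importSymbols calls would have to be compiled into the same directory:

    # Sketch only: load a pysmi-generated module and look up one exported object.
    from pysnmp.smi import builder

    mibBuilder = builder.MibBuilder()
    # Hypothetical directory holding the compiled .py MIB modules
    mibBuilder.addMibSources(builder.DirMibSource('./mibs'))
    mibBuilder.loadModules('Nortel-MsCarrier-MscPassport-AtmEbrMIB')

    # importSymbols returns the same live objects the module registered
    # via exportSymbols at the end of the step-2 snapshot above.
    (rowStatusCol,) = mibBuilder.importSymbols(
        'Nortel-MsCarrier-MscPassport-AtmEbrMIB',
        'mscAtmIfVpcSrcEbrOvRowStatus')
    print(rowStatusCol.getName())       # OID tuple (1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 1)
    print(rowStatusCol.getMaxAccess())  # 'readwrite', as set in the step-3 source

Setting mibBuilder.loadTexts = True before loadModules would also execute the guarded setStatus('mandatory') calls, which is why those guards dominate the masked step-2 view.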
"step-3": "Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols('ASN1',\n 'Integer', 'ObjectIdentifier', 'OctetString')\nNamedValues, = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')\n(ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint,\n ConstraintsUnion, ConstraintsIntersection) = (mibBuilder.importSymbols(\n 'ASN1-REFINEMENT', 'ValueRangeConstraint', 'SingleValueConstraint',\n 'ValueSizeConstraint', 'ConstraintsUnion', 'ConstraintsIntersection'))\n(mscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex,\n mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex) = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmCoreMIB',\n 'mscAtmIfIndex', 'mscAtmIfVptIndex', 'mscAtmIfVcc',\n 'mscAtmIfVptVccIndex', 'mscAtmIfVpc', 'mscAtmIfVptVcc',\n 'mscAtmIfVccIndex', 'mscAtmIfVpcIndex'))\nmscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmIispMIB',\n 'mscAtmIfIisp', 'mscAtmIfVptIisp', 'mscAtmIfVptIispIndex',\n 'mscAtmIfIispIndex'))\n(mscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex,\n mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc) = (mibBuilder.\n importSymbols('Nortel-MsCarrier-MscPassport-AtmNetworkingMIB',\n 'mscAtmIfVpcSrc', 'mscAtmIfVptVccSrcIndex', 'mscAtmIfVccSrcIndex',\n 'mscAtmIfVptVccSrc', 'mscAtmIfVpcSrcIndex', 'mscAtmIfVccSrc'))\nmscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmPnniMIB',\n 'mscAtmIfVptPnniIndex', 'mscAtmIfPnniIndex', 'mscAtmIfPnni',\n 'mscAtmIfVptPnni'))\nmscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = (\n mibBuilder.importSymbols('Nortel-MsCarrier-MscPassport-AtmUniMIB',\n 'mscAtmIfVptUni', 'mscAtmIfUni', 'mscAtmIfUniIndex', 'mscAtmIfVptUniIndex')\n )\nCounter32, DisplayString, Gauge32, StorageType, RowStatus = (mibBuilder.\n importSymbols(\n 'Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB',\n 'Counter32', 'DisplayString', 'Gauge32', 'StorageType', 'RowStatus'))\nNonReplicated, = mibBuilder.importSymbols(\n 'Nortel-MsCarrier-MscPassport-TextualConventionsMIB', 'NonReplicated')\nmscPassportMIBs, = mibBuilder.importSymbols(\n 'Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB', 'mscPassportMIBs')\nNotificationGroup, ModuleCompliance = mibBuilder.importSymbols('SNMPv2-CONF',\n 'NotificationGroup', 'ModuleCompliance')\n(Integer32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress,\n Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow,\n MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks) = (\n mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'ObjectIdentity',\n 'ModuleIdentity', 'Bits', 'Counter32', 'IpAddress', 'Gauge32',\n 'NotificationType', 'iso', 'MibScalar', 'MibTable', 'MibTableRow',\n 'MibTableColumn', 'MibIdentifier', 'Unsigned32', 'Counter64', 'TimeTicks'))\nTextualConvention, DisplayString = mibBuilder.importSymbols('SNMPv2-TC',\n 'TextualConvention', 'DisplayString')\natmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))\nmscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 4, 6, 2))\nmscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 4, 6, 2, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 4, 6, 2, 1, 1)).setIndexNames((0,\n 
'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVpcSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 6, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 6, 2, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 6, 2, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVpcSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec\n =ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, \n 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(\n 'readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4,\n 11))\nmscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 4, 11, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 4, 11, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess('readonly')\nif 
mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 11, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 11, 30))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 11, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 4, 11, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 4, 11, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVpcIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVpcEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 
1, 4, 1, 562, 36, 2, 1, 114, \n 5, 8, 2))\nmscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 5, 8, 2, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 5, 8, 2, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVccSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 8, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 8, 2, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 8, 2, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB', 'mscAtmIfVccSrcIndex'),\n (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec\n =ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, \n 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(\n 'readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5,\n 12))\nmscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 5, 12, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 5, 12, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 
'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 12, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 12, 30))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 12, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=\n ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=\n NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 5, 12, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 5, 12, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif 
mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))\nmscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 6, 7, 1))\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 6, 7, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrRowStatus.setStatus('mandatory')\nmscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrComponentName.setStatus('mandatory')\nmscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrStorageType.setStatus('mandatory')\nmscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 6, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrIndex.setStatus('mandatory')\nmscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6,\n 7, 20))\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrProvTable.setStatus('mandatory')\nmscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 6, 7, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrProvEntry.setStatus('mandatory')\nmscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6,\n 7, 30))\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrOperTable.setStatus('mandatory')\nmscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 6, 7, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))\nif mibBuilder.loadTexts:\n 
mscAtmIfUniEbrOperEntry.setStatus('mandatory')\nmscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562,\n 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, \n 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 6, 7, 40))\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrStatsTable.setStatus('mandatory')\nmscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 6, 7, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfUniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))\nmscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 7, 7, 1))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 7, 7, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrRowStatus.setStatus('mandatory')\nmscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrComponentName.setStatus('mandatory')\nmscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrStorageType.setStatus('mandatory')\nmscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 7, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrIndex.setStatus('mandatory')\nmscAtmIfIispEbrProvTable = 
MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 7, 7, 20))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrProvTable.setStatus('mandatory')\nmscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 7, 7, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrProvEntry.setStatus('mandatory')\nmscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrPathOptimization.setStatus('mandatory')\nmscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 7, 7, 30))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrOperTable.setStatus('mandatory')\nmscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 7, 7, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrOperEntry.setStatus('mandatory')\nmscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,\n 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 7, 7, 40))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrStatsTable.setStatus('mandatory')\nmscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 7, 7, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrStatsEntry.setStatus('mandatory')\nmscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 
1, 4, 1, \n 562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9,\n 6, 7))\nmscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 9, 6, 7, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 9, 6, 7, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrComponentName.setStatus('mandatory')\nmscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrStorageType.setStatus('mandatory')\nmscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 6, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrIndex.setStatus('mandatory')\nmscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 6, 7, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrProvTable.setStatus('mandatory')\nmscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 6, 7, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(\n subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue\n ='c0')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562,\n 36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 6, 7, 30))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrOperTable.setStatus('mandatory')\nmscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 6, 7, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 
'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,\n 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6,\n 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 6, 7, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 6, 7, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmIispMIB', 'mscAtmIfVptIispIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptIispEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9,\n 7, 7))\nmscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 9, 7, 7, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 9, 7, 7, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 9, 7, 7, 1, 1, 2), 
DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')\nmscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')\nmscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 7, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrIndex.setStatus('mandatory')\nmscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 7, 7, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 7, 7, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(\n subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue\n ='c0')).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562,\n 36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 7, 7, 30))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 7, 7, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,\n 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6,\n 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 
2, 1, \n 114, 9, 7, 7, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 7, 7, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfVptPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9,\n 8, 7))\nmscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 8, 7, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 8, 7, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrComponentName.setStatus('mandatory')\nmscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 8, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrStorageType.setStatus('mandatory')\nmscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 8, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrIndex.setStatus('mandatory')\nmscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 9, 8, 7, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrProvTable.setStatus('mandatory')\nmscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 8, 7, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562,\n 36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=\n 
ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 9, 8, 7, 30))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrOperTable.setStatus('mandatory')\nmscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 8, 7, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,\n 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, \n 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 8, 7, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 8, 7, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmUniMIB', 'mscAtmIfVptUniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptUniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 9, 20, 8, 2))\nmscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 9, 20, 8, 2, 1))\nif mibBuilder.loadTexts:\n 
mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 9, 20, 8, 2, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB',\n 'mscAtmIfVptVccSrcIndex'), (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB',\n 'mscAtmIfVptVccSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562,\n 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 20, 8, 2, 20))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 20, 8, 2, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmNetworkingMIB',\n 'mscAtmIfVptVccSrcIndex'), (0, 'Nortel-MsCarrier-MscPassport-AtmEbrMIB',\n 'mscAtmIfVptVccSrcEbrOvIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(\n 'readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, \n 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1))).clone('yes')).setMaxAccess(\n 'readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 9, 20, 12))\nmscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, \n 2, 1, 114, 9, 20, 12, 1))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 9, 20, 12, 1, 
1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562,\n 36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 20, 12, 30))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 20, 12, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(\n subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(\n namedValues=NamedValues(('no', 0), ('yes', 1)))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 9, 20, 12, 40))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 9, 20, 12, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 
'mscAtmIfVptIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfVptVccIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfVptVccEbrInfoIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1,\n 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess(\n 'readonly')\nif mibBuilder.loadTexts:\n mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))\nmscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 1))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 96, 7, 1, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1,\n 114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrRowStatus.setStatus('mandatory')\nmscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2,\n 1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrComponentName.setStatus('mandatory')\nmscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, \n 1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStorageType.setStatus('mandatory')\nmscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 96, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIndex.setStatus('mandatory')\nmscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 96, 7, 20))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvTable.setStatus('mandatory')\nmscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 20, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrProvEntry.setStatus('mandatory')\nmscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, \n 36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36,\n 2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=\n ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue='c0')\n ).setMaxAccess('readwrite')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfPnniEbrOperTable = 
MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, \n 96, 7, 30))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrOperTable.setStatus('mandatory')\nmscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 30, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrOperEntry.setStatus('mandatory')\nmscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 96, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4,\n 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=\n ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1,\n 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(\n subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114,\n 96, 7, 40))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsTable.setStatus('mandatory')\nmscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, \n 114, 96, 7, 40, 1)).setIndexNames((0,\n 'Nortel-MsCarrier-MscPassport-AtmCoreMIB', 'mscAtmIfIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmPnniMIB', 'mscAtmIfPnniIndex'), (0,\n 'Nortel-MsCarrier-MscPassport-AtmEbrMIB', 'mscAtmIfPnniEbrIndex'))\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1,\n 562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, \n 562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess('readonly')\nif mibBuilder.loadTexts:\n mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')\natmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))\natmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1))\natmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3)\n )\natmEbrGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, \n 1, 3, 2))\natmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))\natmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159,\n 3, 1))\natmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, \n 159, 3, 1, 3))\natmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, \n 159, 3, 1, 3, 2))\nmibBuilder.exportSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',\n mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA,\n mscAtmIfUniEbrTotalConnectionRecoveries=\n mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=\n mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=\n mscAtmIfVptPnniEbrProvEntry,\n 
mscAtmIfVptVccEbrInfoTotalPathOptimizations=\n mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=\n mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=\n mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup,\n mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery,\n mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry,\n mscAtmIfVptUniEbrTotalPathOptimizations=\n mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=\n mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr,\n mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization,\n mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry,\n mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType,\n mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus,\n mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable,\n mscAtmIfVptPnniEbrSubscribedConnections=\n mscAtmIfVptPnniEbrSubscribedConnections,\n mscAtmIfVccEbrInfoTotalPathOptimizations=\n mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=\n mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=\n mscAtmIfVptUniEbrProvEntry,\n mscAtmIfVptPnniEbrEligibleRecoveredConnections=\n mscAtmIfVptPnniEbrEligibleRecoveredConnections,\n mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName,\n mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry,\n mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex,\n mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType,\n mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable,\n mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization,\n mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry,\n mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry,\n mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType,\n mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry,\n mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex,\n mscAtmIfPnniEbrTotalConnectionRecoveries=\n mscAtmIfPnniEbrTotalConnectionRecoveries,\n mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable,\n mscAtmIfPnniEbrEligibleRecoveredConnections=\n mscAtmIfPnniEbrEligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoRecoverySubscribed=\n mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=\n mscAtmIfVptVccSrcEbrOvProvTable,\n mscAtmIfVptVccEbrInfoConnectionRecovered=\n mscAtmIfVptVccEbrInfoConnectionRecovered,\n mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName,\n mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName,\n mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry,\n mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName,\n mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry,\n mscAtmIfVptIispEbrTotalPathOptimizations=\n mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=\n mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex,\n mscAtmIfVptUniEbrIneligibleRecoveredConnections=\n mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02\n =atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=\n mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=\n mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=\n mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=\n mscAtmIfVpcSrcEbrOvOptimizationSubscribed,\n mscAtmIfIispEbrTotalPathOptimizations=\n mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName\n =mscAtmIfVccSrcEbrOvComponentName,\n mscAtmIfVccSrcEbrOvOptimizationSubscribed=\n 
mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=\n mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=\n mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=\n mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=\n mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=\n mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=\n mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=\n mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=\n mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=\n mscAtmIfIispEbrRowStatusEntry,\n mscAtmIfVptIispEbrIneligibleRecoveredConnections=\n mscAtmIfVptIispEbrIneligibleRecoveredConnections,\n atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A,\n mscAtmIfVptVccEbrInfoOptimizationSubscribed=\n mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=\n mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=\n mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=\n mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=\n mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=\n mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=\n mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=\n mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=\n mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=\n mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=\n mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=\n mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=\n mscAtmIfPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable,\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections=\n mscAtmIfVptPnniEbrIneligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoConnectionRecovered=\n mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=\n mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=\n mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=\n mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations\n =mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=\n mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=\n mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=\n mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=\n mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=\n mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex,\n mscAtmIfVptUniEbrTotalConnectionRecoveries=\n mscAtmIfVptUniEbrTotalConnectionRecoveries,\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries=\n mscAtmIfVpcEbrInfoTotalConnectionRecoveries,\n mscAtmIfVptVccSrcEbrOvRowStatusEntry=\n mscAtmIfVptVccSrcEbrOvRowStatusEntry,\n mscAtmIfIispEbrTotalConnectionRecoveries=\n mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=\n mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=\n mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=\n mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=\n mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=\n mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=\n mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=\n mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=\n mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=\n mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=\n mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=\n mscAtmIfVptPnniEbrOperEntry, 
mscAtmIfIispEbrSubscribedConnections=\n mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=\n mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=\n mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry\n =mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=\n mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=\n mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=\n mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=\n mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=\n mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=\n mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=\n mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr,\n mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry,\n mscAtmIfVptPnniEbrTotalConnectionRecoveries=\n mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=\n mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=\n mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=\n mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=\n mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=\n mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=\n mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=\n mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=\n mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=\n mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=\n mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=\n mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=\n mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=\n mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=\n mscAtmIfVptVccSrcEbrOvComponentName,\n mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery,\n mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType,\n mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv,\n mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable,\n mscAtmIfUniEbrEligibleRecoveredConnections=\n mscAtmIfUniEbrEligibleRecoveredConnections,\n mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry,\n mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus,\n mscAtmIfIispEbrEligibleRecoveredConnections=\n mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=\n mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=\n mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=\n mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=\n mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=\n mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=\n mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=\n mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=\n mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=\n mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=\n mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=\n mscAtmIfPnniEbrConnectionRecovery,\n mscAtmIfVccEbrInfoConnectionRecovered=\n mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=\n mscAtmIfVccEbrInfoStatsEntry,\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVptVccEbrInfoTotalConnectionRecoveries,\n mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType,\n mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry,\n mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable,\n mscAtmIfVccSrcEbrOvRecoverySubscribed=\n 
mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=\n atmEbrCapabilities, mscAtmIfUniEbrComponentName=\n mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=\n mscAtmIfPnniEbrTotalPathOptimizations,\n mscAtmIfUniEbrIneligibleRecoveredConnections=\n mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=\n mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=\n mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=\n mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=\n mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=\n mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=\n mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=\n mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=\n mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A,\n mscAtmIfVccEbrInfoOptimizationSubscribed=\n mscAtmIfVccEbrInfoOptimizationSubscribed,\n mscAtmIfVptVccSrcEbrOvRowStatusTable=\n mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB,\n mscAtmIfVptVccEbrInfoRecoverySubscribed=\n mscAtmIfVptVccEbrInfoRecoverySubscribed,\n mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry,\n mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus,\n mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry,\n mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType,\n mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry,\n mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable,\n mscAtmIfIispEbrIneligibleRecoveredConnections=\n mscAtmIfIispEbrIneligibleRecoveredConnections,\n mscAtmIfVptIispEbrConnectionRecovery=\n mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=\n mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02,\n mscAtmIfVptIispEbrTotalConnectionRecoveries=\n mscAtmIfVptIispEbrTotalConnectionRecoveries,\n mscAtmIfUniEbrTotalPathOptimizations=\n mscAtmIfUniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvRecoverySubscribed=\n mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=\n mscAtmIfVptPnniEbrOperTable,\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=\n mscAtmIfVptVccSrcEbrOvOptimizationSubscribed,\n mscAtmIfVptUniEbrEligibleRecoveredConnections=\n mscAtmIfVptUniEbrEligibleRecoveredConnections,\n mscAtmIfVpcEbrInfoOptimizationSubscribed=\n mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=\n mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=\n mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=\n mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=\n mscAtmIfVptVccSrcEbrOvStorageType,\n mscAtmIfVptPnniEbrTotalPathOptimizations=\n mscAtmIfVptPnniEbrTotalPathOptimizations,\n mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName,\n mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName,\n mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry,\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed=\n mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=\n mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA,\n mscAtmIfVccEbrInfoTotalConnectionRecoveries=\n mscAtmIfVccEbrInfoTotalConnectionRecoveries)\n",
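The generated code in the step strings above registers each Nortel ATM EBR object under the enterprise OID prefix 1.3.6.1.4.1.562 and then exports the objects by name through the closing mibBuilder.exportSymbols() call. A minimal sketch of how such a compiled module could be consumed with pysnmp — assuming it has been saved as Nortel-MsCarrier-MscPassport-AtmEbrMIB.py in a hypothetical ./pymibs directory, together with the Nortel modules it imports from:

from pysnmp.smi import builder

mib_builder = builder.MibBuilder()
# DirMibSource/addMibSources point the builder at compiled *.py MIB modules;
# './pymibs' is an assumed location, not something taken from this record.
mib_builder.addMibSources(builder.DirMibSource('./pymibs'))

# importSymbols() loads the module (and, recursively, the modules named in
# its own importSymbols() calls) and returns the registered MIB objects.
(ebr_recovery,) = mib_builder.importSymbols(
    'Nortel-MsCarrier-MscPassport-AtmEbrMIB',
    'mscAtmIfPnniEbrConnectionRecovery')

# getName() returns the OID tuple supplied at registration time, matching the
# (1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 1) literal above.
print(ebr_recovery.getName())

Because importSymbols() resolves dependencies recursively, the AtmCoreMIB, AtmIispMIB, AtmPnniMIB, AtmUniMIB and AtmNetworkingMIB modules referenced in the registrations must be available on the same MIB source path.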
"step-4": "#\n# PySNMP MIB module Nortel-MsCarrier-MscPassport-AtmEbrMIB (http://snmplabs.com/pysmi)\n# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-AtmEbrMIB\n# Produced by pysmi-0.3.4 at Mon Apr 29 20:19:41 2019\n# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4\n# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) \n#\nInteger, ObjectIdentifier, OctetString = mibBuilder.importSymbols(\"ASN1\", \"Integer\", \"ObjectIdentifier\", \"OctetString\")\nNamedValues, = mibBuilder.importSymbols(\"ASN1-ENUMERATION\", \"NamedValues\")\nValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols(\"ASN1-REFINEMENT\", \"ValueRangeConstraint\", \"SingleValueConstraint\", \"ValueSizeConstraint\", \"ConstraintsUnion\", \"ConstraintsIntersection\")\nmscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex, mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\", \"mscAtmIfVptIndex\", \"mscAtmIfVcc\", \"mscAtmIfVptVccIndex\", \"mscAtmIfVpc\", \"mscAtmIfVptVcc\", \"mscAtmIfVccIndex\", \"mscAtmIfVpcIndex\")\nmscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIisp\", \"mscAtmIfVptIisp\", \"mscAtmIfVptIispIndex\", \"mscAtmIfIispIndex\")\nmscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex, mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVpcSrc\", \"mscAtmIfVptVccSrcIndex\", \"mscAtmIfVccSrcIndex\", \"mscAtmIfVptVccSrc\", \"mscAtmIfVpcSrcIndex\", \"mscAtmIfVccSrc\")\nmscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\", \"mscAtmIfPnniIndex\", \"mscAtmIfPnni\", \"mscAtmIfVptPnni\")\nmscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUni\", \"mscAtmIfUni\", \"mscAtmIfUniIndex\", \"mscAtmIfVptUniIndex\")\nCounter32, DisplayString, Gauge32, StorageType, RowStatus = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB\", \"Counter32\", \"DisplayString\", \"Gauge32\", \"StorageType\", \"RowStatus\")\nNonReplicated, = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-TextualConventionsMIB\", \"NonReplicated\")\nmscPassportMIBs, = mibBuilder.importSymbols(\"Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB\", \"mscPassportMIBs\")\nNotificationGroup, ModuleCompliance = mibBuilder.importSymbols(\"SNMPv2-CONF\", \"NotificationGroup\", \"ModuleCompliance\")\nInteger32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress, Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks = mibBuilder.importSymbols(\"SNMPv2-SMI\", \"Integer32\", \"ObjectIdentity\", \"ModuleIdentity\", \"Bits\", \"Counter32\", \"IpAddress\", \"Gauge32\", \"NotificationType\", \"iso\", \"MibScalar\", \"MibTable\", \"MibTableRow\", \"MibTableColumn\", \"MibIdentifier\", \"Unsigned32\", \"Counter64\", \"TimeTicks\")\nTextualConvention, DisplayString = mibBuilder.importSymbols(\"SNMPv2-TC\", \"TextualConvention\", \"DisplayString\")\natmEbrMIB = MibIdentifier((1, 3, 6, 
1, 4, 1, 562, 36, 2, 2, 159))\nmscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2))\nmscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1), )\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVpcSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20), )\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVpcSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11))\nmscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1), )\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", 
\"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30), )\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40), )\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVpcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVpcEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif 
mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2))\nmscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1), )\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20), )\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12))\nmscAtmIfVccEbrInfoRowStatusTable = 
MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1), )\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30), )\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40), )\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVccIndex\"), (0, 
\"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))\nmscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrRowStatus.setStatus('mandatory')\nmscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrComponentName.setStatus('mandatory')\nmscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrStorageType.setStatus('mandatory')\nmscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfUniEbrIndex.setStatus('mandatory')\nmscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrProvTable.setStatus('mandatory')\nmscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrProvEntry.setStatus('mandatory')\nmscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrOperTable.setStatus('mandatory')\nmscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 
1, 114, 6, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrOperEntry.setStatus('mandatory')\nmscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfUniEbrStatsTable.setStatus('mandatory')\nmscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfUniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))\nmscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrRowStatus.setStatus('mandatory')\nmscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrComponentName.setStatus('mandatory')\nmscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
mscAtmIfIispEbrStorageType.setStatus('mandatory')\nmscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfIispEbrIndex.setStatus('mandatory')\nmscAtmIfIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfIispEbrProvTable.setStatus('mandatory')\nmscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrProvEntry.setStatus('mandatory')\nmscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrPathOptimization.setStatus('mandatory')\nmscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfIispEbrOperTable.setStatus('mandatory')\nmscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrOperEntry.setStatus('mandatory')\nmscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfIispEbrStatsTable.setStatus('mandatory')\nmscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfIispEbrStatsEntry.setStatus('mandatory')\nmscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 
562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7))\nmscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrComponentName.setStatus('mandatory')\nmscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrStorageType.setStatus('mandatory')\nmscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrIndex.setStatus('mandatory')\nmscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrProvTable.setStatus('mandatory')\nmscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30), )\nif mibBuilder.loadTexts: 
mscAtmIfVptIispEbrOperTable.setStatus('mandatory')\nmscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmIispMIB\", \"mscAtmIfVptIispIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptIispEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7))\nmscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif 
mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')\nmscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')\nmscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrIndex.setStatus('mandatory')\nmscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif 
mibBuilder.loadTexts: mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfVptPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7))\nmscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')\nmscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrComponentName.setStatus('mandatory')\nmscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrStorageType.setStatus('mandatory')\nmscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrIndex.setStatus('mandatory')\nmscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrProvTable.setStatus('mandatory')\nmscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: 
mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrOperTable.setStatus('mandatory')\nmscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')\nmscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmUniMIB\", \"mscAtmIfVptUniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptUniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 
2))\nmscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1), )\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVptVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20), )\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmNetworkingMIB\", \"mscAtmIfVptVccSrcIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccSrcEbrOvIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1))).clone('yes')).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12))\nmscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1), )\nif mibBuilder.loadTexts: 
mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30), )\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues((\"no\", 0), (\"yes\", 1)))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40), )\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 
40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfVptVccIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfVptVccEbrInfoIndex\"))\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')\nmscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))\nmscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')\nmscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatus.setStatus('mandatory')\nmscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrComponentName.setStatus('mandatory')\nmscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrStorageType.setStatus('mandatory')\nmscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 10), NonReplicated())\nif mibBuilder.loadTexts: mscAtmIfPnniEbrIndex.setStatus('mandatory')\nmscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrProvTable.setStatus('mandatory')\nmscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrProvEntry.setStatus('mandatory')\nmscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')\nmscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue=\"c0\")).setMaxAccess(\"readwrite\")\nif 
mibBuilder.loadTexts: mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')\nmscAtmIfPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrOperTable.setStatus('mandatory')\nmscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrOperEntry.setStatus('mandatory')\nmscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')\nmscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')\nmscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40), )\nif mibBuilder.loadTexts: mscAtmIfPnniEbrStatsTable.setStatus('mandatory')\nmscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1), ).setIndexNames((0, \"Nortel-MsCarrier-MscPassport-AtmCoreMIB\", \"mscAtmIfIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmPnniMIB\", \"mscAtmIfPnniIndex\"), (0, \"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", \"mscAtmIfPnniEbrIndex\"))\nif mibBuilder.loadTexts: mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')\nmscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')\nmscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')\natmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))\natmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1))\natmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3))\natmEbrGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3, 2))\natmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))\natmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1))\natmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3))\natmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3, 2))\nmibBuilder.exportSymbols(\"Nortel-MsCarrier-MscPassport-AtmEbrMIB\", mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA, mscAtmIfUniEbrTotalConnectionRecoveries=mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=mscAtmIfPnniEbrComponentName, 
mscAtmIfVptPnniEbrProvEntry=mscAtmIfVptPnniEbrProvEntry, mscAtmIfVptVccEbrInfoTotalPathOptimizations=mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup, mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery, mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry, mscAtmIfVptUniEbrTotalPathOptimizations=mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr, mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization, mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry, mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType, mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus, mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable, mscAtmIfVptPnniEbrSubscribedConnections=mscAtmIfVptPnniEbrSubscribedConnections, mscAtmIfVccEbrInfoTotalPathOptimizations=mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=mscAtmIfVptUniEbrProvEntry, mscAtmIfVptPnniEbrEligibleRecoveredConnections=mscAtmIfVptPnniEbrEligibleRecoveredConnections, mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName, mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry, mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex, mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType, mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable, mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization, mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry, mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry, mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType, mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry, mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex, mscAtmIfPnniEbrTotalConnectionRecoveries=mscAtmIfPnniEbrTotalConnectionRecoveries, mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable, mscAtmIfPnniEbrEligibleRecoveredConnections=mscAtmIfPnniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoRecoverySubscribed=mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=mscAtmIfVptVccSrcEbrOvProvTable, mscAtmIfVptVccEbrInfoConnectionRecovered=mscAtmIfVptVccEbrInfoConnectionRecovered, mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName, mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName, mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry, mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName, mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry, mscAtmIfVptIispEbrTotalPathOptimizations=mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex, mscAtmIfVptUniEbrIneligibleRecoveredConnections=mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02=atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=mscAtmIfVpcSrcEbrOvOptimizationSubscribed, mscAtmIfIispEbrTotalPathOptimizations=mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName=mscAtmIfVccSrcEbrOvComponentName, mscAtmIfVccSrcEbrOvOptimizationSubscribed=mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=mscAtmIfUniEbrOperTable, 
mscAtmIfIispEbrStorageType=mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=mscAtmIfIispEbrRowStatusEntry, mscAtmIfVptIispEbrIneligibleRecoveredConnections=mscAtmIfVptIispEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A, mscAtmIfVptVccEbrInfoOptimizationSubscribed=mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=mscAtmIfPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable, mscAtmIfVptPnniEbrIneligibleRecoveredConnections=mscAtmIfVptPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcEbrInfoConnectionRecovered=mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations=mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex, mscAtmIfVptUniEbrTotalConnectionRecoveries=mscAtmIfVptUniEbrTotalConnectionRecoveries, mscAtmIfVpcEbrInfoTotalConnectionRecoveries=mscAtmIfVpcEbrInfoTotalConnectionRecoveries, mscAtmIfVptVccSrcEbrOvRowStatusEntry=mscAtmIfVptVccSrcEbrOvRowStatusEntry, mscAtmIfIispEbrTotalConnectionRecoveries=mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry=mscAtmIfUniEbrProvEntry, 
mscAtmIfVpcEbrInfoRowStatusTable=mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr, mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry, mscAtmIfVptPnniEbrTotalConnectionRecoveries=mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=mscAtmIfVccSrcEbrOvIndex, mscAtmIfPnniEbrSubscribedConnections=mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=mscAtmIfVptVccSrcEbrOvComponentName, mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery, mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType, mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv, mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable, mscAtmIfUniEbrEligibleRecoveredConnections=mscAtmIfUniEbrEligibleRecoveredConnections, mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry, mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus, mscAtmIfIispEbrEligibleRecoveredConnections=mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=mscAtmIfPnniEbrConnectionRecovery, mscAtmIfVccEbrInfoConnectionRecovered=mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=mscAtmIfVccEbrInfoStatsEntry, mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=mscAtmIfVptVccEbrInfoTotalConnectionRecoveries, mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType, mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry, mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable, mscAtmIfVccSrcEbrOvRecoverySubscribed=mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=atmEbrCapabilities, mscAtmIfUniEbrComponentName=mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=mscAtmIfPnniEbrTotalPathOptimizations, mscAtmIfUniEbrIneligibleRecoveredConnections=mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=mscAtmIfVptIispEbrProvEntry, 
mscAtmIfUniEbrRowStatusEntry=mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A, mscAtmIfVccEbrInfoOptimizationSubscribed=mscAtmIfVccEbrInfoOptimizationSubscribed, mscAtmIfVptVccSrcEbrOvRowStatusTable=mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB, mscAtmIfVptVccEbrInfoRecoverySubscribed=mscAtmIfVptVccEbrInfoRecoverySubscribed, mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry, mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus, mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry, mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType, mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry, mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable, mscAtmIfIispEbrIneligibleRecoveredConnections=mscAtmIfIispEbrIneligibleRecoveredConnections, mscAtmIfVptIispEbrConnectionRecovery=mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02, mscAtmIfVptIispEbrTotalConnectionRecoveries=mscAtmIfVptIispEbrTotalConnectionRecoveries, mscAtmIfUniEbrTotalPathOptimizations=mscAtmIfUniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvRecoverySubscribed=mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=mscAtmIfVptPnniEbrOperTable, mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=mscAtmIfVptVccSrcEbrOvOptimizationSubscribed, mscAtmIfVptUniEbrEligibleRecoveredConnections=mscAtmIfVptUniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoOptimizationSubscribed=mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=mscAtmIfVptVccSrcEbrOvStorageType, mscAtmIfVptPnniEbrTotalPathOptimizations=mscAtmIfVptPnniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName, mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName, mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry, mscAtmIfVptVccSrcEbrOvRecoverySubscribed=mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA, mscAtmIfVccEbrInfoTotalConnectionRecoveries=mscAtmIfVccEbrInfoTotalConnectionRecoveries)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Formatters example
#
# Requirements:
# Go to the ../hello_world directory and do: python prepare_data.py
#
# Instructions:
#
# Just run this file:
#
# python table.py
# Output:
# * standard output – text table
# * table.html
# * cross_table.html
#
from cubes import Workspace, create_formatter
workspace = Workspace("slicer.ini")
# Create formatters
text_formatter = create_formatter("text_table")
html_formatter = create_formatter("simple_html_table")
html_cross_formatter = create_formatter("html_cross_table")
# Get the browser and data
browser = workspace.browser("irbd_balance")
result = browser.aggregate(drilldown=["item"])
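# cache the aggregation result so each formatter below can reuse it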
result = result.cached()
#
# 1. Create text output
#
print "Text output"
print "-----------"
print text_formatter(result, "item")
#
# 2. Create HTML output (see table.html)
#
with open("table.html", "w") as f:
data = html_formatter(result, "item")
f.write(data)
#
# 3. Create cross-table to cross_table.html
#
result = browser.aggregate(drilldown=["item", "year"])
with open("cross_table.html", "w") as f:
data = html_cross_formatter(result,
onrows=["year"],
oncolumns=["item.category_label"])
f.write(data)
print "Check also table.html and cross_table.html files"
|
normal
|
{
"blob_id": "55e743cb027d27cc6b668424c1584f27a8e8c51a",
"index": 5707,
"step-1": "# Formatters example\n#\n# Requirements:\n# Go to the ../hello_world directory and do: python prepare_data.py\n#\n# Instructions:\n#\n# Just run this file:\n#\n# python table.py\n# Output:\n# * standard input – text table\n# * table.html\n# * cross_table.html\n#\n\nfrom cubes import Workspace, create_formatter\n\nworkspace = Workspace(\"slicer.ini\")\n\n# Create formatters\ntext_formatter = create_formatter(\"text_table\")\nhtml_formatter = create_formatter(\"simple_html_table\")\nhtml_cross_formatter = create_formatter(\"html_cross_table\")\n\n# Get the browser and data\n\nbrowser = workspace.browser(\"irbd_balance\")\n\nresult = browser.aggregate(drilldown=[\"item\"])\nresult = result.cached()\n\n#\n# 1. Create text output\n#\nprint \"Text output\"\nprint \"-----------\"\n\nprint text_formatter(result, \"item\")\n\n\n#\n# 2. Create HTML output (see table.html)\n#\nwith open(\"table.html\", \"w\") as f:\n data = html_formatter(result, \"item\")\n f.write(data)\n\n#\n# 3. Create cross-table to cross_table.html\n#\nresult = browser.aggregate(drilldown=[\"item\", \"year\"])\nwith open(\"cross_table.html\", \"w\") as f:\n data = html_cross_formatter(result,\n onrows=[\"year\"],\n oncolumns=[\"item.category_label\"])\n f.write(data)\n\nprint \"Check also table.html and cross_table.html files\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
"""
Proyecto SA^3
Autor: Mario Lopez
Luis Aviles
Joaquin V
Fecha: Octubre del 2012
versión: 1
"""
# Template handling for the HTML views
import jinja2
from jinja2 import Environment, PackageLoader
import os
import cgi
import datetime
import time  # used by VerClinicas (time.sleep)
import urllib
# for hashing
import hashlib
# Communication layer with the Model
from modelo.Layer import *
from modelo.Layer2 import *
# Web framework for Python
import webapp2
# DataStore API
from google.appengine.ext import db
# Initialization of the template system: HTML templates are looked up
# relative to the current directory ("__file__").
# The env object also holds session state in env.globals.
env = Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
env.filters['format_time'] = format_time
# Decorator that verifies an active session exists before running a handler
def before_filter(fn):
def inner_function(self):
		if 'session' not in env.globals:
self.redirect('/')
return fn(self)
return inner_function
"""
REQUEST HANDLERS
"""
class MainPage(webapp2.RequestHandler):
"""Pantalla inicial. Despliega una forma para iniciar sesión
"""
def get(self):
self.response.headers['Content-Type'] = 'text/html'
        # Generate the built-in admin user
matricula = 'admin'
password = 'admin'
nombre = 'admin'
apellidop = 'admin'
apellidom = 'admin'
tipo = 'admin'
        # Hash the password (RIPEMD-160, despite the 'md5' variable name)
h = hashlib.new('ripemd160')
h.update(password)
md5 = h.hexdigest()
password = md5
#Usuario(matricula = matricula, password = password, nombre = nombre, apellidop = apellidop, apellidom = apellidom, tipo = tipo).put()
#productos = db.GqlQuery("SELECT * FROM Inventario")
        # Display the login form
_despliegaLogin(self, '/vistas/login.html')
class VerUsuarios(webapp2.RequestHandler):
""" Despliega los usuarios registrados
"""
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
usuarios = db.GqlQuery("SELECT * FROM Usuario")
_despliegaVerUsuarios(self, usuarios, '/vistas/verUsuarios.html')
class RegistroAlumno(webapp2.RequestHandler):
""" Formulario para registrar usuarios
"""
def get(self):
self.response.headers['Content-Type'] = 'text/html'
clinicas = db.GqlQuery("SELECT * FROM Clinica")
_despliegaRegistroAlumno(self, clinicas, '/vistas/registroAlumno.html')
class GrabaAlumno(webapp2.RequestHandler):
def post(self):
nombre = self.request.get('nombre')
matricula = self.request.get('matricula')
password = self.request.get('password')
		# Hash the password
h = hashlib.new('ripemd160')
h.update(password)
md5 = h.hexdigest()
password = md5
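		# NOTE: this handler appears unfinished in the original source --
		# the hashed password is never stored and no response is issued.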
class RegistraUsuario(webapp2.RequestHandler):
""" Formulario para registrar usuarios
"""
def get(self):
self.response.headers['Content-Type'] = 'text/html'
_despliegaRegistraUsuario(self, '/vistas/registraUsuario.html')
class GrabaUsuario(webapp2.RequestHandler):
def post(self):
nombre = self.request.get('nombre')
matricula = self.request.get('matricula')
password = self.request.get('password')
apellidop = self.request.get('apellidop')
apellidom = self.request.get('apellidom')
tipo = self.request.get('tipo')
		# Hash the password
h = hashlib.new('ripemd160')
h.update(password)
md5 = h.hexdigest()
password = md5
grabaUsuario(matricula,password,nombre,apellidop,apellidom,tipo)
self.redirect('/verUsuarios')
class IniciaSesion(webapp2.RequestHandler):
""" Entrada: al dar click en iniciar sesión en la pantalla principal
Salida: se crea la sesión del usuario y lo redirige a....
"""
def post(self):
self.response.headers['Content-Type'] = 'text/html'
matricula = self.request.get('matricula')
password = self.request.get('password')
h = hashlib.new('ripemd160')
h.update(password)
md5 = h.hexdigest()
password = md5
		# Parameterized query -- the original concatenated user input into the GQL string (injection risk)
		user = db.GqlQuery("SELECT * FROM Usuario WHERE matricula = :1 AND password = :2", matricula, password)
if user.count() == 1:
for u in user:
user = []
user.append(u.nombre)
user.append(u.matricula)
user.append(u.tipo)
user.append(u.key())
env.globals['session'] = user
self.redirect('/bienvenida')
else:
self.redirect('/')
class CerrarSesion(webapp2.RequestHandler):
""" Entrada: al dar click en cerrar sesión
Salida: se elimina la sesión actual y se
redirige a la pantalla para iniciar sesión
"""
def get(self):
del env.globals['session']
self.redirect('/')
class Bienvenida(webapp2.RequestHandler):
""" Pantalla que se muestra al iniciar sesion
"""
@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
_despliegaBienvenida(self, '/vistas/bienvenida.html')
class AgregaHorarioClinica(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/html'
clinicas = getAllClinicas()
_despliegaAgregaHorarioClinica(self,clinicas, '/vistas/agregarHorarioClinica.html')
#=======================================Clinic functions
class AgregarClinica(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/html'
_despliegaAgregarClinica(self, '/vistas/Clinica/agregarClinica.html')
class GrabaClinica(webapp2.RequestHandler):
def post(self):
key = self.request.get('key')
nombre = self.request.get('nombre')
descripcion = self.request.get('descripcion')
localizacion = self.request.get('localizacion')
unidades = int(self.request.get('unidades'))
defectuosas = int(self.request.get('defectuosas'))
if(key == None or key ==""):
grabaClinica(nombre,descripcion,localizacion,unidades,defectuosas)
else:
actualizaClinica(key,nombre,descripcion,localizacion,unidades,defectuosas)
		self.redirect('/verClinicas') # Redirect to the clinics view
class EliminaClinica(webapp2.RequestHandler):
def get(self):
key = self.request.get('key')
eliminaClinica(key)
		self.redirect('/verClinicas') # Redirect to the clinics view
class VerClinicas(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
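		# short pause, presumably to let the eventually consistent datastore reflect recent writes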
time.sleep(.1)
clinicas = getAllClinicas()
_despliegaVerClinicas(self, clinicas, '/vistas/Clinica/verClinicas.html')
class EditaClinica(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
clinica = db.get(self.request.get('key'))
_despliegaEditaClinica(self, clinica, '/vistas/Clinica/editaClinica.html')
#=======================================End of clinic handling
#=======================================Begin group handling
class AgregarGrupo(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/html'
_despliegaAgregarGrupo(self,self.request.get('key'), '/vistas/Grupo/agregarGrupo.html')
class GrabarGrupo(webapp2.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/html'
key = self.request.get('key')
clinica = self.request.get('clinica')
nombre = self.request.get('nombre')
descripcion = self.request.get('descripcion')
inicioAgenda = self.request.get('inicioAgenda')
finAgenda = self.request.get('finAgenda')
fa = self.request.get('fa')
if(key == None or key == ""):
grabaGrupo(clinica,nombre,descripcion,inicioAgenda,finAgenda,fa)
else:
actualizaGrupo(key,nombre,descripcion,inicioAgenda,finAgenda,fa)
		self.redirect('/verGrupos?key='+clinica) # Redirect to the clinic's group list view
class EliminarGrupo(webapp2.RequestHandler):
def get(self):
key = self.request.get('key')
eliminaGrupo(key)
		self.redirect('/verGrupos?key='+self.request.get('clinica')) # Redirect to the groups view
class VerGrupos(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
clinica = getObject(self.request.get('key'))
_despliegaVerGrupos(self,clinica, getGrupos(clinica.key()), '/vistas/Grupo/verGrupos.html')
class EditarGrupo(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
grupo = db.get(self.request.get('key'))
clinica = self.request.get('clinica')
_despliegaEditaGrupo(self, clinica, grupo, '/vistas/Grupo/editaGrupo.html')
#=======================================End of group handling
#=======================================Begin group-assignment handling
class UsuariosAsignacion(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type']= 'text/html'
usuarios = getAllUsuarios()
_despliegaUsuariosAsignacion(self,usuarios,'/vistas/Asignacion/verUsuarios.html')
class ClinicasAsignacion(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type']= 'text/html'
clinicas = getAllClinicas()
usuario = getObject(self.request.get('usuario'))
_despliegaClinicasAsignacion(self,usuario,clinicas,'/vistas/Asignacion/verClinicas.html')
class GruposAsignacion(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type']= 'text/html'
clinica = getObject(self.request.get('clinica'))
usuario = getObject(self.request.get('usuario'))
_despliegaGruposAsignacion(self,usuario,clinica,'/vistas/Asignacion/verGrupos.html')
class GuardaAsignacion(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type']= 'text/html'
grupo = self.request.get('grupo')
usuario = self.request.get('usuario')
		# Create the assignment between the two objects
creaAsignacion(usuario,grupo)
_despliegaExito(self,"Usuario Asignado Correctamente",'/asignaUsuarios1','/vistas/Exito.html')
#=======================================End of group-assignment handling
class AgregarHorario(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/html'
_despliegaAgregarHorario(self,self.request.get('key'), '/vistas/Horario/agregarHorario.html')
class GrabarHorario(webapp2.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/html'
key = self.request.get('key')
grupo = self.request.get('grupo')
descripcion = self.request.get('descripcion')
dia = self.request.get('dia')
horaInicio = self.request.get('horaInicio')
horaFin = self.request.get('horaFin')
		if(key == None or key == ""):
			grabaHorario(grupo,descripcion,dia,horaInicio,horaFin)
		else:
			# the original called actualizaGrupo here (a likely copy-paste slip);
			# actualizaHorario is assumed to exist alongside grabaHorario/eliminaHorario
			actualizaHorario(key,descripcion,dia,horaInicio,horaFin)
		self.redirect('/verHorarios?key='+grupo) # Redirect to the group's schedule view
class EliminarHorario(webapp2.RequestHandler):
def get(self):
key = self.request.get('key')
eliminaHorario(key)
		self.redirect('/verHorarios?key='+self.request.get('grupo')) # Redirect to the schedules view
class VerHorarios(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
#horarios = getAllHorarios(self.request.get('key'))
grupo = getObject(self.request.get('key'))
_despliegaVerHorarios(self,grupo, getHorarios(grupo), '/vistas/Horario/verHorarios.html')
class EditarHorario(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
horario = db.get(self.request.get('key'))
grupo = self.request.get('grupo')
_despliegaEditaHorario(self, grupo, horario, '/vistas/Horario/editaHorario.html')
#=======================================End of schedule handling
class EliminaUsuario(webapp2.RequestHandler):
def get(self):
usuarioKey = self.request.get('key')
deleteUsuario(usuarioKey)
self.redirect('/verUsuarios')
class EditaUsuario(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/html'
usuarioKey = self.request.get('key')
usuario = getUsuario(usuarioKey);
_despliegaEditaUsuario(self, usuario, '/vistas/editaUsuario.html')
class GuardaCambiosUsuario(webapp2.RequestHandler):
def post(self):
usuarioKey = self.request.get('usuarioKey')
nombre = self.request.get('nombre')
matricula = self.request.get('matricula')
apellidop = self.request.get('apellidop')
apellidom = self.request.get('apellidom')
tipo = self.request.get('tipo')
usuario = getUsuario(usuarioKey);
updateUsuario(usuario,nombre,matricula,apellidop,apellidom,tipo)
self.redirect('/verUsuarios')
#====================================Begin appointment-scheduling process
class AgendaPacienteExample(webapp2.RequestHandler):
def get(self):
horario = self.request.get('horario')
disponible = verificaDisponibilidadExample(horario)
self.response.headers['Content-Type'] = 'text/html'
self.response.out.write('Total:<br/>')
self.response.out.write(disponible)
class AgendaPaciente(webapp2.RequestHandler):
def post(self):
horario = self.request.get('horario')
descripcion = self.request.get('descripcion')
folio = self.request.get('folio')
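		# session layout (set in IniciaSesion): [nombre, matricula, tipo, key]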
usuario = env.globals.get('session')[3]
disponible = verificaDisponibilidad(horario,usuario,descripcion,folio)
self.response.headers['Content-Type'] = 'text/html'
if (disponible[1] == True):
_despliegaExito(self,"El usuario ha agendado correctamente (No."+str(disponible[0])+")",'/verHorariosUsuario','/vistas/Exito.html')
else:
_despliegaError(self,"Agenda Llena ("+str(disponible[0])+" Pacientes), no es posible agendar",'/verHorariosUsuario','/vistas/Error.html')
class VerFormaCita(webapp2.RequestHandler):
def get(self):
horario = self.request.get('horario')
self.response.headers['Content-Type'] = 'text/html'
_despliegaFormaCita(self,horario,'/vistas/Alumno/agendaForma.html')
class VerGruposUsuario(webapp2.RequestHandler):
def get(self):
k=env.globals.get('session')
key = k[3]
usuario = db.get(key)
grupos = usuario.grupos
self.response.headers['Content-Type'] = 'text/html'
_despliegaGruposUsuario(self,usuario,grupos, '/vistas/Alumno/verGrupos.html')
class VerHorariosUsuario(webapp2.RequestHandler):
def get(self):
usuario = env.globals.get('session')[3]
horarios = getAgendaValida(usuario)
self.response.headers['Content-Type'] = 'text/html'
_despliegaHorariosUsuario(self,horarios, '/vistas/Alumno/verHorarios.html')
#===================================End of appointment-scheduling process
#=======================================Begin period handling
class AgregarPeriodo(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/html'
_despliegaAgregarPeriodo(self, '/vistas/Periodo/agregarPeriodo.html')
class GrabarPeriodo(webapp2.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/html'
descripcion = self.request.get('descripcion')
fechaInicio = self.request.get('fechaInicio')
fechaFin = self.request.get('fechaFin')
actual = self.request.get('actual')
if actual == '1':
esActual = True
quitaActual()
else:
esActual = False
fi = to_datetime(fechaInicio)
ff = to_datetime(fechaFin)
grabaPeriodo(descripcion,fi,ff,esActual)
		self.redirect('/verPeriodo') # Redirect to the periods view
class EliminarPeriodo(webapp2.RequestHandler):
def get(self):
key = self.request.get('key')
deletePeriodo(key)
		self.redirect('/verPeriodo') # Redirect to the periods view
class VerPeriodo(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
#horarios = getAllHorarios(self.request.get('key'))
periodos = getAllPeriodos()
_despliegaVerPeriodo(self,periodos, '/vistas/Periodo/verPeriodo.html')
class EditarPeriodo(webapp2.RequestHandler):
#@before_filter
def get(self):
self.response.headers['Content-Type'] = 'text/html'
periodoKey = self.request.get('key')
periodo = getPeriodo(periodoKey)
_despliegaEditaPeriodo(self, periodo, '/vistas/Periodo/editaPeriodo.html')
class GrabarCambiosPeriodo(webapp2.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/html'
descripcion = self.request.get('descripcion')
fechaInicio = self.request.get('fechaInicio')
fechaFin = self.request.get('fechaFin')
actual = self.request.get('actual')
if actual == '1':
esActual = True
quitaActual()
else:
esActual = False
fi = to_datetime(fechaInicio)
ff = to_datetime(fechaFin)
periodoKey = self.request.get('key')
periodo = getPeriodo(periodoKey)
updatePeriodo(periodo,descripcion,fi,ff,esActual)
		self.redirect('/verPeriodo') # Redirect to the periods view
"""
Views
"""
def _despliegaLogin(self, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({}))
def _despliegaRegistraCita(self, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({}))
def _despliegaFormaCita(self,horario, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'horario':horario}))
def _despliegaVerUsuarios(self, usuarios, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'usuarios': usuarios }))
def _despliegaBienvenida(self, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({}))
def _despliegaRegistroAlumno(self, clinicas, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'clinicas': clinicas }))
def _despliegaRegistraUsuario(self, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({}))
def _despliegaAgregaHorarioClinica(self, clinicas, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'clinicas': clinicas }))
def _despliegaMostrarHorariosClinica(self, horarios,clinica, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'horarios': horarios,'clinica':clinica }))
def _despliegaAgregarClinica(self, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({}))
"""
Despliega la vista para agregar un grupo nuevo
"""
def _despliegaAgregarGrupo(self,clinica, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'clinica':clinica}))
def _despliegaAgregarHorario(self,grupo, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'grupo':grupo}))
def _despliegaVerClinicas(self, clinicas, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'clinicas': clinicas }))
def _despliegaEditaUsuario(self, usuario, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'usuario': usuario }))
"""
Vista de Grupos de una Clinica en Especial
"""
def _despliegaVerGrupos(self, clinica, grupos, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'grupos': grupos,'clinica':clinica}))
"""
Vista de Grupos de una Clinica en Especial
"""
def _despliegaVerHorarios(self, grupo, horarios, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'grupo': grupo,'horarios':horarios}))
"""
Vista para editar Un grupo en especial
"""
def _despliegaEditaGrupo(self,clinica,grupo, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'grupo':grupo,'clinica':clinica}))
"""
Vista para ver usuarios del sistema
"""
def _despliegaUsuariosAsignacion(self,usuarios, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'usuarios':usuarios}))
"""
Vista para ver clinicas para asignar
"""
def _despliegaClinicasAsignacion(self,usuario,clinicas,templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'usuario':usuario,'clinicas':clinicas}))
"""
Vista para ver grupos a asignar
"""
def _despliegaGruposAsignacion(self,usuario,clinica,templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'usuario':usuario,'clinica':clinica}))
"""
Despliega un mensaje de Exito y la liga de retorno
"""
def _despliegaExito(self,mensaje,liga,templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'mensaje':mensaje,'liga':liga}))
def _despliegaError(self,mensaje,liga,templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'mensaje':mensaje,'liga':liga}))
"""
Vista para editar Un horario
"""
def _despliegaEditaHorario(self,grupo, horario, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'grupo':grupo,'horario':horario}))
"""
Vista de los Grupos a los que pertenece un usuario
"""
def _despliegaGruposUsuario(self,usuario,grupos, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'grupos':grupos,'usuario':usuario}))
def _despliegaEditaClinica(self, clinica, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'clinica': clinica }))
def _despliegaHorariosUsuario(self, horarios, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'horarios': horarios }))
"""
Vistas para manejo de periodos
"""
def _despliegaAgregarPeriodo(self, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({}))
def _despliegaVerPeriodo(self,periodos, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'periodos':periodos}))
def _despliegaEditaPeriodo(self, periodo, templateFile):
template = env.get_template(templateFile)
self.response.out.write(template.render({'periodo':periodo}))
app = webapp2.WSGIApplication([('/', MainPage),
                               ('/iniciaSesion', IniciaSesion),
                               ('/bienvenida', Bienvenida),
                               ('/verUsuarios', VerUsuarios),
                               ('/registroAlumno', RegistroAlumno),
                               ('/grabaAlumno', GrabaAlumno),
                               ('/registraUsuario', RegistraUsuario),
                               ('/grabaUsuario', GrabaUsuario),
                               ('/agregaHorarioClinica', AgregaHorarioClinica),
                               # Clinic handling
                               ('/verClinicas', VerClinicas),
                               ('/agregarClinica', AgregarClinica),
                               ('/grabaClinica', GrabaClinica),
                               ('/editaClinica', EditaClinica),
                               ('/eliminaClinica', EliminaClinica),
                               # End of clinic handling
                               # User handling
                               ('/eliminaUsuario', EliminaUsuario),
                               ('/editaUsuario', EditaUsuario),
                               ('/guardaCambiosUsuario', GuardaCambiosUsuario),
                               # Group handling
                               ('/verGrupos', VerGrupos),
                               ('/grabarGrupo', GrabarGrupo),
                               ('/eliminarGrupo', EliminarGrupo),
                               ('/agregarGrupo', AgregarGrupo),
                               ('/editarGrupo', EditarGrupo),
                               # End of group handling
                               # Schedule handling
                               ('/verHorarios', VerHorarios),
                               ('/grabarHorario', GrabarHorario),
                               ('/eliminarHorario', EliminarHorario),
                               ('/agregarHorario', AgregarHorario),
                               ('/editarHorario', EditarHorario),
                               # End of schedule handling
                               # Period handling
                               ('/agregarPeriodo', AgregarPeriodo),
                               ('/grabarPeriodo', GrabarPeriodo),
                               ('/verPeriodo', VerPeriodo),
                               ('/editarPeriodo', EditarPeriodo),
                               ('/eliminarPeriodo', EliminarPeriodo),
                               ('/grabarCambiosPeriodo', GrabarCambiosPeriodo),
                               # End of period handling
                               # Assignment handling
                               ('/asignaUsuarios1', UsuariosAsignacion),
                               ('/asignaUsuarios2', ClinicasAsignacion),
                               ('/asignaUsuarios3', GruposAsignacion),
                               ('/guardaAsignacion', GuardaAsignacion),
                               # End of assignment handling
                               # Scheduling (agenda)
                               ('/agendaPaciente', AgendaPaciente),
                               ('/agendaPacienteExample', AgendaPacienteExample),
                               ('/verGruposUsuario', VerGruposUsuario),
                               ('/verHorariosUsuario', VerHorariosUsuario),
                               ('/verFormaCita', VerFormaCita),
                               # End of scheduling
                               ('/cerrarSesion', CerrarSesion)], debug=True)
|
normal
|
{
"blob_id": "51cb750082ce93b6d14fe3aa40711836d493129c",
"index": 3692,
"step-1": "\"\"\"\r\nProyecto SA^3\r\nAutor: \tMario Lopez\r\n Luis Aviles\r\n\t\tJoaquin V\r\nFecha: Octubre del 2012\r\nversión: 1\r\n\"\"\"\r\n\r\n#Manejo de temlates en el HTML\r\nimport jinja2 \r\nfrom jinja2 import Environment, PackageLoader\r\n\r\nimport os\r\nimport cgi\r\nimport datetime\r\nimport urllib\r\n# for hashing\r\nimport hashlib\r\n#Layer de comunicacion con Modelo\r\nfrom modelo.Layer import *\r\nfrom modelo.Layer2 import *\r\n#Framework de Web para Python\r\nimport webapp2\r\n \r\n# API DataStore\r\nfrom google.appengine.ext import db\r\n\r\n# intitalization of template system. It says that HTML templates will\r\n# be found in current directory (\"__file__\")\r\n# variable env para sesiones\r\nenv = Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))\r\nenv.filters['format_time'] = format_time\n\r\n# Método para verificar si hay una sesión activa\r\ndef before_filter(fn):\r\n\tdef inner_function(self):\r\n\t\tif not 'session' in env.globals:\r\n\t\t\tself.redirect('/')\r\n\t\treturn fn(self)\r\n\treturn inner_function\r\n\r\n\"\"\"\r\nREQUEST HANDLERS\r\n\"\"\"\r\n\r\nclass MainPage(webapp2.RequestHandler):\r\n \"\"\"Pantalla inicial. Despliega una forma para iniciar sesión\r\n \"\"\"\r\n \r\n def get(self):\r\n\r\n self.response.headers['Content-Type'] = 'text/html'\r\n \r\n # Generar el admin\r\n matricula = 'admin'\r\n password = 'admin'\r\n nombre = 'admin'\r\n apellidop = 'admin'\r\n apellidom = 'admin'\r\n tipo = 'admin'\r\n # Generar password\r\n h = hashlib.new('ripemd160')\r\n h.update(password)\r\n md5 = h.hexdigest()\r\n password = md5\r\n\r\n #Usuario(matricula = matricula, password = password, nombre = nombre, apellidop = apellidop, apellidom = apellidom, tipo = tipo).put()\r\n \r\n #productos = db.GqlQuery(\"SELECT * FROM Inventario\")\r\n \r\n #Desplegar lista de productos\r\n _despliegaLogin(self, '/vistas/login.html')\r\n\t\t\t\r\n\r\nclass VerUsuarios(webapp2.RequestHandler):\r\n\t\"\"\" Despliega los usuarios registrados\r\n\t\"\"\"\r\n\t\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\t\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t\t\t\r\n\t\tusuarios = db.GqlQuery(\"SELECT * FROM Usuario\")\r\n\t\t\r\n\t\t_despliegaVerUsuarios(self, usuarios, '/vistas/verUsuarios.html')\r\n\r\nclass RegistroAlumno(webapp2.RequestHandler):\r\n\t\"\"\" Formulario para registrar usuarios\r\n\t\"\"\"\r\n\t\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t\r\n\t\tclinicas = db.GqlQuery(\"SELECT * FROM Clinica\")\r\n\t\t\r\n\t\t_despliegaRegistroAlumno(self, clinicas, '/vistas/registroAlumno.html')\r\n\r\nclass GrabaAlumno(webapp2.RequestHandler):\r\n\t\r\n\tdef post(self):\r\n\t\tnombre = self.request.get('nombre')\r\n\t\tmatricula = self.request.get('matricula')\r\n\t\tpassword = self.request.get('password')\r\n\t\t\r\n\t\t# Generar password\r\n\t\th = hashlib.new('ripemd160')\r\n\t\th.update(password)\r\n\t\tmd5 = h.hexdigest()\r\n\t\t\r\n\t\tpassword = md5\r\n\r\nclass RegistraUsuario(webapp2.RequestHandler):\r\n\t\"\"\" Formulario para registrar usuarios\r\n\t\"\"\"\r\n\t\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t_despliegaRegistraUsuario(self, '/vistas/registraUsuario.html')\r\n\r\nclass GrabaUsuario(webapp2.RequestHandler):\r\n\t\r\n\tdef post(self):\r\n\t\tnombre = self.request.get('nombre')\r\n\t\tmatricula = self.request.get('matricula')\r\n\t\tpassword = self.request.get('password')\r\n\t\tapellidop = 
self.request.get('apellidop')\r\n\t\tapellidom = self.request.get('apellidom')\r\n\t\ttipo = self.request.get('tipo')\r\n\t\t\r\n\t\t# Generar password\r\n\t\th = hashlib.new('ripemd160')\r\n\t\th.update(password)\r\n\t\tmd5 = h.hexdigest()\r\n\t\t\r\n\t\tpassword = md5\r\n\t\t\r\n\t\tgrabaUsuario(matricula,password,nombre,apellidop,apellidom,tipo)\r\n\t\tself.redirect('/verUsuarios')\r\n\r\nclass IniciaSesion(webapp2.RequestHandler):\r\n\t\"\"\" Entrada: al dar click en iniciar sesión en la pantalla principal\r\n\t\tSalida: se crea la sesión del usuario y lo redirige a....\r\n\t\"\"\"\r\n\t\r\n\tdef post(self):\t\t\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tmatricula = self.request.get('matricula')\r\n\t\tpassword = self.request.get('password')\r\n\t\t\r\n\t\th = hashlib.new('ripemd160')\r\n\t\th.update(password)\r\n\t\tmd5 = h.hexdigest()\r\n\t\tpassword = md5\r\n\t\t\r\n\t\tuser = db.GqlQuery(\"SELECT * FROM Usuario WHERE matricula = '\" + matricula + \"' AND password = '\" + password + \"'\")\r\n\t\t\r\n\t\tif user.count() == 1:\r\n\t\t\tfor u in user:\r\n\t\t\t\tuser = []\r\n\t\t\t\tuser.append(u.nombre)\r\n\t\t\t\tuser.append(u.matricula)\r\n\t\t\t\tuser.append(u.tipo)\r\n\t\t\t\tuser.append(u.key())\n\t\t\t\tenv.globals['session'] = user\r\n\t\t\t\tself.redirect('/bienvenida')\r\n\t\telse:\r\n\t\t\tself.redirect('/')\r\n\r\nclass CerrarSesion(webapp2.RequestHandler):\r\n\t\"\"\" Entrada: al dar click en cerrar sesión\r\n\t\tSalida: se elimina la sesión actual y se\r\n\t\tredirige a la pantalla para iniciar sesión\r\n\t\"\"\"\r\n\t\r\n\tdef get(self):\r\n\t\tdel env.globals['session']\r\n\t\tself.redirect('/')\r\n\r\nclass Bienvenida(webapp2.RequestHandler):\r\n\t\"\"\"\tPantalla que se muestra al iniciar sesion\r\n\t\"\"\"\r\n\t\r\n\t@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t_despliegaBienvenida(self, '/vistas/bienvenida.html')\r\n\r\nclass AgregaHorarioClinica(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tclinicas = getAllClinicas()\r\n\t\t_despliegaAgregaHorarioClinica(self,clinicas, '/vistas/agregarHorarioClinica.html')\r\n\r\n\r\n#=======================================Funciones de Clinica\r\nclass AgregarClinica(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t_despliegaAgregarClinica(self, '/vistas/Clinica/agregarClinica.html')\r\n\r\nclass GrabaClinica(webapp2.RequestHandler):\r\n\tdef post(self):\r\n\t\tkey = self.request.get('key')\r\n\t\tnombre = self.request.get('nombre')\r\n\t\tdescripcion = self.request.get('descripcion')\r\n\t\tlocalizacion = self.request.get('localizacion')\r\n\t\tunidades = int(self.request.get('unidades'))\r\n\t\tdefectuosas = int(self.request.get('defectuosas'))\r\n\t\tif(key == None or key ==\"\"):\r\n\t\t\tgrabaClinica(nombre,descripcion,localizacion,unidades,defectuosas)\r\n\t\telse:\r\n\t\t\tactualizaClinica(key,nombre,descripcion,localizacion,unidades,defectuosas)\r\n\t\tself.redirect('/verClinicas') #Redireccion a la vista de clinicas\r\n\r\nclass EliminaClinica(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tkey = self.request.get('key')\r\n\t\teliminaClinica(key)\r\n\t\tself.redirect('/verClinicas') #Redireccion a las clinicas\r\n\nclass VerClinicas(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\ttime.sleep(.1)\n\t\tclinicas = 
getAllClinicas()\r\n\t\t_despliegaVerClinicas(self, clinicas, '/vistas/Clinica/verClinicas.html')\r\n\r\nclass EditaClinica(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tclinica = db.get(self.request.get('key'))\r\n\t\t_despliegaEditaClinica(self, clinica, '/vistas/Clinica/editaClinica.html')\r\n\r\n#=======================================Fin de manejos de Clinicas\r\n#=======================================Inicia Manejo de Grupos\r\nclass AgregarGrupo(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t_despliegaAgregarGrupo(self,self.request.get('key'), '/vistas/Grupo/agregarGrupo.html')\r\n\r\nclass GrabarGrupo(webapp2.RequestHandler):\r\n\tdef post(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tkey = self.request.get('key')\r\n\t\tclinica = self.request.get('clinica')\r\n\t\tnombre = self.request.get('nombre')\r\n\t\tdescripcion = self.request.get('descripcion')\r\n\t\tinicioAgenda = self.request.get('inicioAgenda')\n\t\tfinAgenda = self.request.get('finAgenda')\n\t\tfa = self.request.get('fa')\n\t\tif(key == None or key == \"\"):\r\n\t\t\tgrabaGrupo(clinica,nombre,descripcion,inicioAgenda,finAgenda,fa)\r\n\t\telse:\r\n\t\t\tactualizaGrupo(key,nombre,descripcion,inicioAgenda,finAgenda,fa)\r\n\t\tself.redirect('/verGrupos?key='+clinica) #Redireccion a la vista de Grupos de una Clinica\r\n\r\nclass EliminarGrupo(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tkey = self.request.get('key')\r\n\t\teliminaGrupo(key)\r\n\t\tself.redirect('/verGrupos?key='+self.request.get('clinica')) #Redireccion a la vista de los Grupos\r\n\r\nclass VerGrupos(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tclinica = getObject(self.request.get('key'))\r\n\t\t_despliegaVerGrupos(self,clinica, getGrupos(clinica.key()), '/vistas/Grupo/verGrupos.html')\r\n\r\nclass EditarGrupo(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tgrupo = db.get(self.request.get('key'))\r\n\t\tclinica = self.request.get('clinica')\r\n\t\t_despliegaEditaGrupo(self, clinica, grupo, '/vistas/Grupo/editaGrupo.html')\r\n\r\n#=======================================Fin de manejo de Grupos\r\n#=======================================Inicia Manejo de Asignacion de Grupo\r\nclass UsuariosAsignacion(webapp2.RequestHandler):\n\tdef get(self):\n\t\tself.response.headers['Content-Type']= 'text/html'\n\t\tusuarios = getAllUsuarios()\n\t\t_despliegaUsuariosAsignacion(self,usuarios,'/vistas/Asignacion/verUsuarios.html')\n\nclass ClinicasAsignacion(webapp2.RequestHandler):\n\tdef get(self):\n\t\tself.response.headers['Content-Type']= 'text/html'\n\t\tclinicas = getAllClinicas()\n\t\tusuario = getObject(self.request.get('usuario'))\n\t\t_despliegaClinicasAsignacion(self,usuario,clinicas,'/vistas/Asignacion/verClinicas.html')\n\nclass GruposAsignacion(webapp2.RequestHandler):\n\tdef get(self):\n\t\tself.response.headers['Content-Type']= 'text/html'\n\t\tclinica = getObject(self.request.get('clinica'))\n\t\tusuario = getObject(self.request.get('usuario'))\n\t\t_despliegaGruposAsignacion(self,usuario,clinica,'/vistas/Asignacion/verGrupos.html')\n\nclass GuardaAsignacion(webapp2.RequestHandler):\n\tdef get(self):\n\t\tself.response.headers['Content-Type']= 'text/html'\n\t\tgrupo = self.request.get('grupo')\n\t\tusuario = 
self.request.get('usuario')\n\t\t#Crea la asignacion entre ambos objetos\n\t\tcreaAsignacion(usuario,grupo)\n\t\t_despliegaExito(self,\"Usuario Asignado Correctamente\",'/asignaUsuarios1','/vistas/Exito.html')\n#=======================================Fin de Manejo de Asignacion de Grupo\r\n\nclass AgregarHorario(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t_despliegaAgregarHorario(self,self.request.get('key'), '/vistas/Horario/agregarHorario.html')\r\n\r\nclass GrabarHorario(webapp2.RequestHandler):\r\n\tdef post(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tkey = self.request.get('key')\r\n\t\tgrupo = self.request.get('grupo')\r\n\t\tdescripcion = self.request.get('descripcion')\r\n\t\tdia = self.request.get('dia')\r\n\t\thoraInicio = self.request.get('horaInicio')\r\n\t\thoraFin = self.request.get('horaFin')\r\n\t\tif(key == None or key == \"\"):\r\n\t\t\tgrabaHorario(grupo,descripcion,dia,horaInicio,horaFin)\r\n\t\telse:\r\n\t\t\tactualizaGrupo(key,descripcion,dia,horaInicio,horaFin)\r\n\t\tself.redirect('/verHorarios?key='+grupo) #Redireccion a la vista de Grupos de una Clinica\r\n\r\nclass EliminarHorario(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tkey = self.request.get('key')\r\n\t\teliminaHorario(key)\r\n\t\tself.redirect('/verHorarios?key='+self.request.get('grupo')) #Redireccion a la vista de Horarios\r\n\r\nclass VerHorarios(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t#horarios = getAllHorarios(self.request.get('key'))\r\n\t\tgrupo = getObject(self.request.get('key'))\r\n\t\t_despliegaVerHorarios(self,grupo, getHorarios(grupo), '/vistas/Horario/verHorarios.html')\r\n\r\nclass EditarHorario(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\thorario = db.get(self.request.get('key'))\r\n\t\tgrupo = self.request.get('grupo')\r\n\t\t_despliegaEditaHorario(self, grupo, horario, '/vistas/Horario/editaHorario.html')\r\n\r\n#=======================================Fin de manejo de Horario\r\n\r\nclass EliminaUsuario(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tusuarioKey = self.request.get('key')\r\n\t\tdeleteUsuario(usuarioKey)\r\n\t\tself.redirect('/verUsuarios')\r\n\r\nclass EditaUsuario(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tusuarioKey = self.request.get('key')\r\n\t\tusuario = getUsuario(usuarioKey);\r\n\t\t_despliegaEditaUsuario(self, usuario, '/vistas/editaUsuario.html')\r\n\nclass GuardaCambiosUsuario(webapp2.RequestHandler):\r\n\tdef post(self):\r\n\t\tusuarioKey = self.request.get('usuarioKey')\r\n\t\tnombre = self.request.get('nombre')\r\n\t\tmatricula = self.request.get('matricula')\r\n\t\tapellidop = self.request.get('apellidop')\r\n\t\tapellidom = self.request.get('apellidom')\r\n\t\ttipo = self.request.get('tipo')\r\n\t\t\r\n\t\tusuario = getUsuario(usuarioKey);\r\n\t\tupdateUsuario(usuario,nombre,matricula,apellidop,apellidom,tipo)\r\n\t\tself.redirect('/verUsuarios')\r\n#====================================Inicia Proceso de Agendas\nclass AgendaPacienteExample(webapp2.RequestHandler):\n\tdef get(self):\n\t\thorario = self.request.get('horario')\n\t\tdisponible = verificaDisponibilidadExample(horario)\n\t\tself.response.headers['Content-Type'] = 
'text/html'\n\t\tself.response.out.write('Total:<br/>')\n\t\tself.response.out.write(disponible)\n\nclass AgendaPaciente(webapp2.RequestHandler):\n\tdef post(self):\n\t\thorario = self.request.get('horario')\n\t\tdescripcion = self.request.get('descripcion')\n\t\tfolio = self.request.get('folio')\n\t\tusuario = env.globals.get('session')[3]\n\t\tdisponible = verificaDisponibilidad(horario,usuario,descripcion,folio)\n\t\tself.response.headers['Content-Type'] = 'text/html'\n\t\tif (disponible[1] == True):\n\t\t\t_despliegaExito(self,\"El usuario ha agendado correctamente (No.\"+str(disponible[0])+\")\",'/verHorariosUsuario','/vistas/Exito.html')\n\t\telse:\n\t\t\t_despliegaError(self,\"Agenda Llena (\"+str(disponible[0])+\" Pacientes), no es posible agendar\",'/verHorariosUsuario','/vistas/Error.html')\n\nclass VerFormaCita(webapp2.RequestHandler):\n\tdef get(self):\n\t\thorario = self.request.get('horario')\n\t\tself.response.headers['Content-Type'] = 'text/html'\n\t\t_despliegaFormaCita(self,horario,'/vistas/Alumno/agendaForma.html')\n\t\t\n\nclass VerGruposUsuario(webapp2.RequestHandler):\n\tdef get(self):\n\t\tk=env.globals.get('session')\n\t\tkey = k[3]\n\t\tusuario = db.get(key)\n\t\tgrupos = usuario.grupos\n\t\tself.response.headers['Content-Type'] = 'text/html'\n\t\t_despliegaGruposUsuario(self,usuario,grupos, '/vistas/Alumno/verGrupos.html')\r\n\nclass VerHorariosUsuario(webapp2.RequestHandler):\n\tdef get(self):\n\t\tusuario = env.globals.get('session')[3]\n\t\thorarios = getAgendaValida(usuario)\n\t\tself.response.headers['Content-Type'] = 'text/html'\n\t\t_despliegaHorariosUsuario(self,horarios, '/vistas/Alumno/verHorarios.html')\r\n#===================================Finaliza Proceso de agendas\n\r\n#=======================================Inicia Manejo de Periodos\r\nclass AgregarPeriodo(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t_despliegaAgregarPeriodo(self, '/vistas/Periodo/agregarPeriodo.html')\r\n\r\nclass GrabarPeriodo(webapp2.RequestHandler):\r\n\tdef post(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tdescripcion = self.request.get('descripcion')\r\n\t\tfechaInicio = self.request.get('fechaInicio')\r\n\t\tfechaFin = self.request.get('fechaFin')\r\n\t\tactual = self.request.get('actual')\r\n\t\t\r\n\t\tif actual == '1':\r\n\t\t\tesActual = True\r\n\t\t\tquitaActual()\r\n\t\telse:\r\n\t\t\tesActual = False\r\n\t\t\t\t\r\n\t\tfi = to_datetime(fechaInicio)\r\n\t\tff = to_datetime(fechaFin)\r\n\r\n\t\tgrabaPeriodo(descripcion,fi,ff,esActual)\r\n\t\tself.redirect('/verPeriodo') #Redireccion a la vista de Grupos de una Clinica\r\n\r\nclass EliminarPeriodo(webapp2.RequestHandler):\r\n\tdef get(self):\r\n\t\tkey = self.request.get('key')\r\n\t\tdeletePeriodo(key)\r\n\t\tself.redirect('/verPeriodo') #Redireccion a la vista de Horarios\r\n\r\nclass VerPeriodo(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\t#horarios = getAllHorarios(self.request.get('key'))\r\n\t\tperiodos = getAllPeriodos()\r\n\t\t_despliegaVerPeriodo(self,periodos, '/vistas/Periodo/verPeriodo.html')\r\n\t\t\r\nclass EditarPeriodo(webapp2.RequestHandler):\r\n\t#@before_filter\r\n\tdef get(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tperiodoKey = self.request.get('key')\r\n\t\tperiodo = getPeriodo(periodoKey)\r\n\t\t_despliegaEditaPeriodo(self, periodo, '/vistas/Periodo/editaPeriodo.html')\r\n\r\nclass 
GrabarCambiosPeriodo(webapp2.RequestHandler):\r\n\tdef post(self):\r\n\t\tself.response.headers['Content-Type'] = 'text/html'\r\n\t\tdescripcion = self.request.get('descripcion')\r\n\t\tfechaInicio = self.request.get('fechaInicio')\r\n\t\tfechaFin = self.request.get('fechaFin')\r\n\t\tactual = self.request.get('actual')\r\n\t\t\r\n\t\tif actual == '1':\r\n\t\t\tesActual = True\r\n\t\t\tquitaActual()\r\n\t\telse:\r\n\t\t\tesActual = False\r\n\t\t\t\t\r\n\t\tfi = to_datetime(fechaInicio)\r\n\t\tff = to_datetime(fechaFin)\r\n\t\t\r\n\t\tperiodoKey = self.request.get('key')\r\n\t\tperiodo = getPeriodo(periodoKey)\r\n\t\t\r\n\t\tupdatePeriodo(periodo,descripcion,fi,ff,esActual)\r\n\t\tself.redirect('/verPeriodo') #Redireccion a la vista de Grupos de una Clinica\r\n\r\n\"\"\"\r\nViews\r\n\"\"\"\r\n\r\ndef _despliegaLogin(self, templateFile):\r\n template = env.get_template(templateFile)\r\n self.response.out.write(template.render({}))\r\n\r\ndef _despliegaRegistraCita(self, templateFile):\r\n template = env.get_template(templateFile)\r\n self.response.out.write(template.render({}))\r\n\ndef _despliegaFormaCita(self,horario, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'horario':horario}))\r\n\r\ndef _despliegaVerUsuarios(self, usuarios, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'usuarios': usuarios }))\r\n \r\ndef _despliegaBienvenida(self, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({}))\r\n\t\t\r\ndef _despliegaRegistroAlumno(self, clinicas, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'clinicas': clinicas }))\r\n\t\t\r\ndef _despliegaRegistraUsuario(self, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({}))\r\n\r\ndef _despliegaAgregaHorarioClinica(self, clinicas, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'clinicas': clinicas }))\r\n\r\ndef _despliegaMostrarHorariosClinica(self, horarios,clinica, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'horarios': horarios,'clinica':clinica }))\r\n\r\ndef _despliegaAgregarClinica(self, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({}))\r\n\r\n\"\"\"\r\nDespliega la vista para agregar un grupo nuevo\r\n\"\"\"\r\ndef _despliegaAgregarGrupo(self,clinica, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'clinica':clinica}))\r\ndef _despliegaAgregarHorario(self,grupo, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'grupo':grupo}))\r\ndef _despliegaVerClinicas(self, clinicas, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'clinicas': clinicas }))\r\n\t\t\r\ndef _despliegaEditaUsuario(self, usuario, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'usuario': usuario }))\r\n\r\n\"\"\"\r\n Vista de Grupos de una Clinica en Especial\r\n\"\"\"\r\ndef _despliegaVerGrupos(self, clinica, grupos, templateFile):\r\n\t\ttemplate = 
env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'grupos': grupos,'clinica':clinica}))\r\n\r\n\"\"\"\r\n Vista de Grupos de una Clinica en Especial\r\n\"\"\"\r\ndef _despliegaVerHorarios(self, grupo, horarios, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'grupo': grupo,'horarios':horarios}))\r\n\r\n\"\"\"\r\n\tVista para editar Un grupo en especial\r\n\"\"\"\r\ndef _despliegaEditaGrupo(self,clinica,grupo, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'grupo':grupo,'clinica':clinica}))\r\n\r\n\n\"\"\"\r\n\tVista para ver usuarios del sistema\n\"\"\"\r\ndef _despliegaUsuariosAsignacion(self,usuarios, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'usuarios':usuarios}))\r\n\n\"\"\"\r\n\tVista para ver clinicas para asignar\n\"\"\"\r\ndef _despliegaClinicasAsignacion(self,usuario,clinicas,templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'usuario':usuario,'clinicas':clinicas}))\r\n\n\"\"\"\r\n\tVista para ver grupos a asignar\n\"\"\"\r\ndef _despliegaGruposAsignacion(self,usuario,clinica,templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'usuario':usuario,'clinica':clinica}))\r\n\n\"\"\"\r\n\tDespliega un mensaje de Exito y la liga de retorno\n\"\"\"\r\ndef _despliegaExito(self,mensaje,liga,templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'mensaje':mensaje,'liga':liga}))\r\n\ndef _despliegaError(self,mensaje,liga,templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'mensaje':mensaje,'liga':liga}))\r\n\"\"\"\r\n\tVista para editar Un horario\n\"\"\"\r\ndef _despliegaEditaHorario(self,grupo, horario, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'grupo':grupo,'horario':horario}))\r\n\"\"\"\r\n\tVista de los Grupos a los que pertenece un usuario\n\"\"\"\r\ndef _despliegaGruposUsuario(self,usuario,grupos, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'grupos':grupos,'usuario':usuario}))\r\n\ndef _despliegaEditaClinica(self, clinica, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'clinica': clinica }))\r\n\ndef _despliegaHorariosUsuario(self, horarios, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'horarios': horarios }))\r\n\r\n\"\"\"\r\n\tVistas para manejo de periodos\r\n\"\"\"\r\ndef _despliegaAgregarPeriodo(self, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({}))\r\n\t\t\r\ndef _despliegaVerPeriodo(self,periodos, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'periodos':periodos}))\r\n\r\ndef _despliegaEditaPeriodo(self, periodo, templateFile):\r\n\t\ttemplate = env.get_template(templateFile)\r\n\t\tself.response.out.write(template.render({'periodo':periodo}))\r\n\r\napp = webapp2.WSGIApplication([('/', MainPage),\r\n ('/iniciaSesion', IniciaSesion),\r\n ('/bienvenida', Bienvenida),\r\n ('/verUsuarios', VerUsuarios),\r\n ('/registroAlumno', 
RegistroAlumno),\r\n ('/grabaAlumno', GrabaAlumno),\r\n ('/registraUsuario', RegistraUsuario),\r\n ('/grabaUsuario', GrabaUsuario),\r\n ('/verClinicas', VerClinicas),\r\n ('/agregarClinica', AgregarClinica),\r\n ('/agregaHorarioClinica', AgregaHorarioClinica),\r\n ('/agregarHorario', AgregarHorario),\r\n #Manejo de Clinicas\r\n ('/grabaClinica', GrabaClinica),\r\n ('/cerrarSesion', CerrarSesion),\r\n ('/grabaClinica', GrabaClinica),\r\n ('/eliminaUsuario', EliminaUsuario),\r\n ('/editaUsuario', EditaUsuario),\r\n ('/editaClinica', EditaClinica),\r\n ('/eliminaClinica', EliminaClinica),\r\n ('/verClinicas', VerClinicas),\r\n ('/agregarClinica', AgregarClinica),\r\n #Fin manejo de Clinica\r\n #Inicio de Manejo de Grupos\r\n ('/verGrupos', VerGrupos),\r\n ('/grabarGrupo', GrabarGrupo),\r\n ('/eliminarGrupo', EliminarGrupo),\r\n ('/agregarGrupo', AgregarGrupo),\r\n ('/editarGrupo', EditarGrupo),\r\n #Fin de manejo de Grupo\r\n #Inicio de Manejo de Horarios\r\n ('/verHorarios', VerHorarios),\r\n ('/grabarHorario', GrabarHorario),\r\n ('/eliminarHorario', EliminarHorario),\r\n ('/agregarHorario', AgregarHorario),\r\n ('/editarHorario', EditarHorario),\r\n #Fin de manejo de Grupo\r\n\t\t\t\t\t\t\t #Inicio de Agregar periodos\r\n\t\t\t\t\t\t\t ('/agregarPeriodo', AgregarPeriodo),\r\n\t\t\t\t\t\t\t ('/grabarPeriodo', GrabarPeriodo),\r\n\t\t\t\t\t\t\t ('/verPeriodo', VerPeriodo),\r\n\t\t\t\t\t\t\t ('/editarPeriodo', EditarPeriodo),\r\n\t\t\t\t\t\t\t ('/eliminarPeriodo', EliminarPeriodo),\r\n\t\t\t\t\t\t\t ('/grabarCambiosPeriodo', GrabarCambiosPeriodo),\r\n\t\t\t\t\t\t\t\t#Finaliza manejo de periodos\n\t\t\t\t\t\t\t\t#Inicia manejo de Asignacion\n ('/asignaUsuarios1', UsuariosAsignacion),\r\n\t ('/asignaUsuarios2', ClinicasAsignacion),\r\n\t\t ('/asignaUsuarios3', GruposAsignacion),\r\n\t\t\t ('/guardaAsignacion', GuardaAsignacion),\r\t\n\t\t\t\t#Finaliza manejo de Asignacion\n\t\t\t\t#Inicia Agenda\n\t\t\t ('/agendaPaciente',AgendaPaciente),\r\n\t\t\t ('/agendaPacienteExample',AgendaPacienteExample),\r\n\t\t\t ('/verGruposUsuario',VerGruposUsuario),\r\n\t\t\t ('/verHorariosUsuario',VerHorariosUsuario),\r\n\t\t\t ('/verFormaCita',VerFormaCita),\r\n\t\t\t\t#Finaliza Agenda\n ('/cerrarSesion', CerrarSesion),\n\t\t\t\t('/guardaCambiosUsuario', GuardaCambiosUsuario)], debug=True)\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import boto3
from app.models import *
from app.config import *
from app.lib.log import save_races_to_db, save_laptimes_to_db
from app.utils.utils import get_sec
import pandas as pd
def import_csv_from_aws():
client = boto3.client(
's3',
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY
)
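	# fetch the pre-filtered CSV extracts from the 'ergast-csv' S3 bucket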
client.download_file('ergast-csv','filtered_laptimes.csv','filtered_laptimes.csv')
client.download_file('ergast-csv','filtered_races.csv','filtered_races.csv')
df_lapTimes = pd.read_csv('filtered_laptimes.csv')
df_races = pd.read_csv('filtered_races.csv')
df_races['round'] = df_races['round'].astype(int)
df_races['season'] = df_races['season'].astype(int)
df_lapTimes.rename(columns={"driverId":"driverRef"}, inplace=True)
df_lapTimes = pd.merge(df_lapTimes, df_races[['raceId', 'raceName', 'season', 'round']], on=['raceId'], how='left')
df_lapTimes.fillna("0:00:00", inplace=True)
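	# get_sec (app.utils.utils) presumably converts an 'H:MM:SS'-style string to seconds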
df_lapTimes['time'] = df_lapTimes['time'].map(lambda x: get_sec(x))
df_lapTimes = df_lapTimes[["driverRef", "season", "raceId", "raceName", "round", "lap", "time", "position"]]
df_lapTimes.rename(columns={"round":"roundId"}, inplace=True)
save_races_to_db(df_races, db.session)
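	# bulk-insert lap times one race at a time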
for i, group in df_lapTimes.groupby("raceId"):
g = group.drop(["raceId"], axis=1)
		db.session.bulk_insert_mappings(LapTimes, g.to_dict("records"))
db.session.commit()
|
normal
|
{
"blob_id": "b573db8ea0845fb947636b8d82ed462904c6005d",
"index": 5519,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef import_csv_from_aws():\n client = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID,\n aws_secret_access_key=AWS_SECRET_ACCESS_KEY)\n client.download_file('ergast-csv', 'filtered_laptimes.csv',\n 'filtered_laptimes.csv')\n client.download_file('ergast-csv', 'filtered_races.csv',\n 'filtered_races.csv')\n df_lapTimes = pd.read_csv('filtered_laptimes.csv')\n df_races = pd.read_csv('filtered_races.csv')\n df_races['round'] = df_races['round'].astype(int)\n df_races['season'] = df_races['season'].astype(int)\n df_lapTimes.rename(columns={'driverId': 'driverRef'}, inplace=True)\n df_lapTimes = pd.merge(df_lapTimes, df_races[['raceId', 'raceName',\n 'season', 'round']], on=['raceId'], how='left')\n df_lapTimes.fillna('0:00:00', inplace=True)\n df_lapTimes['time'] = df_lapTimes['time'].map(lambda x: get_sec(x))\n df_lapTimes = df_lapTimes[['driverRef', 'season', 'raceId', 'raceName',\n 'round', 'lap', 'time', 'position']]\n df_lapTimes.rename(columns={'round': 'roundId'}, inplace=True)\n save_races_to_db(df_races, db.session)\n for i, group in df_lapTimes.groupby('raceId'):\n g = group.drop(['raceId'], axis=1)\n b.session.bulk_insert_mappings(LapTimes, g.to_dict('records'))\n db.session.commit()\n",
"step-3": "import boto3\nfrom app.models import *\nfrom app.config import *\nfrom app.lib.log import save_races_to_db, save_laptimes_to_db\nfrom app.utils.utils import get_sec\nimport pandas as pd\n\n\ndef import_csv_from_aws():\n client = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID,\n aws_secret_access_key=AWS_SECRET_ACCESS_KEY)\n client.download_file('ergast-csv', 'filtered_laptimes.csv',\n 'filtered_laptimes.csv')\n client.download_file('ergast-csv', 'filtered_races.csv',\n 'filtered_races.csv')\n df_lapTimes = pd.read_csv('filtered_laptimes.csv')\n df_races = pd.read_csv('filtered_races.csv')\n df_races['round'] = df_races['round'].astype(int)\n df_races['season'] = df_races['season'].astype(int)\n df_lapTimes.rename(columns={'driverId': 'driverRef'}, inplace=True)\n df_lapTimes = pd.merge(df_lapTimes, df_races[['raceId', 'raceName',\n 'season', 'round']], on=['raceId'], how='left')\n df_lapTimes.fillna('0:00:00', inplace=True)\n df_lapTimes['time'] = df_lapTimes['time'].map(lambda x: get_sec(x))\n df_lapTimes = df_lapTimes[['driverRef', 'season', 'raceId', 'raceName',\n 'round', 'lap', 'time', 'position']]\n df_lapTimes.rename(columns={'round': 'roundId'}, inplace=True)\n save_races_to_db(df_races, db.session)\n for i, group in df_lapTimes.groupby('raceId'):\n g = group.drop(['raceId'], axis=1)\n b.session.bulk_insert_mappings(LapTimes, g.to_dict('records'))\n db.session.commit()\n",
"step-4": "import boto3\nfrom app.models import *\nfrom app.config import *\nfrom app.lib.log import save_races_to_db, save_laptimes_to_db\nfrom app.utils.utils import get_sec\nimport pandas as pd\n\ndef import_csv_from_aws():\n\n\tclient = boto3.client(\n\t\t's3',\n\t\taws_access_key_id=AWS_ACCESS_KEY_ID,\n\t\taws_secret_access_key=AWS_SECRET_ACCESS_KEY\n\t)\n\n\tclient.download_file('ergast-csv','filtered_laptimes.csv','filtered_laptimes.csv')\n\tclient.download_file('ergast-csv','filtered_races.csv','filtered_races.csv')\n\n\tdf_lapTimes = pd.read_csv('filtered_laptimes.csv')\n\tdf_races = pd.read_csv('filtered_races.csv')\n\n\tdf_races['round'] = df_races['round'].astype(int)\n\tdf_races['season'] = df_races['season'].astype(int)\n\n\tdf_lapTimes.rename(columns={\"driverId\":\"driverRef\"}, inplace=True)\n\tdf_lapTimes = pd.merge(df_lapTimes, df_races[['raceId', 'raceName', 'season', 'round']], on=['raceId'], how='left')\n\n\tdf_lapTimes.fillna(\"0:00:00\", inplace=True)\n\tdf_lapTimes['time'] = df_lapTimes['time'].map(lambda x: get_sec(x))\n\n\tdf_lapTimes = df_lapTimes[[\"driverRef\", \"season\", \"raceId\", \"raceName\", \"round\", \"lap\", \"time\", \"position\"]]\n\tdf_lapTimes.rename(columns={\"round\":\"roundId\"}, inplace=True)\n\n\tsave_races_to_db(df_races, db.session)\n\n\tfor i, group in df_lapTimes.groupby(\"raceId\"):\n\n\t\tg = group.drop([\"raceId\"], axis=1)\n\t\tb.session.bulk_insert_mappings(LapTimes, g.to_dict(\"records\"))\n\t\tdb.session.commit()\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Tests for parsers.py
@author Kevin Wilson <[email protected]>
"""
import crisis.parsers as undertest
import datetime
import unittest
class TestParsers(unittest.TestCase):
def test_parse_date(self):
date = '8/5/2013 16:14'
self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14),
undertest.parse_date(date))
	def test_parse_date_short(self):
date = '8/5/13 16:14'
self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14),
undertest.parse_date_short(date))
def test_parse_line(self):
line = ["1","2","3"]
actual = undertest.parse_line(line)
expected = [1,2,3]
self.assertTrue(all(x == y for x, y in zip(expected, actual)))
if __name__ == '__main__':
unittest.main()
|
normal
|
{
"blob_id": "253d37f29e33f61d7e1a5ec2f9a1d6307a2ae108",
"index": 6921,
"step-1": "<mask token>\n\n\nclass TestParsers(unittest.TestCase):\n <mask token>\n\n def test_part_date_short(self):\n date = '8/5/13 16:14'\n self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14), undertest.\n parse_date_short(date))\n\n def test_parse_line(self):\n line = ['1', '2', '3']\n actual = undertest.parse_line(line)\n expected = [1, 2, 3]\n self.assertTrue(all(x == y for x, y in zip(expected, actual)))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestParsers(unittest.TestCase):\n\n def test_parse_date(self):\n date = '8/5/2013 16:14'\n self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14), undertest.\n parse_date(date))\n\n def test_part_date_short(self):\n date = '8/5/13 16:14'\n self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14), undertest.\n parse_date_short(date))\n\n def test_parse_line(self):\n line = ['1', '2', '3']\n actual = undertest.parse_line(line)\n expected = [1, 2, 3]\n self.assertTrue(all(x == y for x, y in zip(expected, actual)))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestParsers(unittest.TestCase):\n\n def test_parse_date(self):\n date = '8/5/2013 16:14'\n self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14), undertest.\n parse_date(date))\n\n def test_part_date_short(self):\n date = '8/5/13 16:14'\n self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14), undertest.\n parse_date_short(date))\n\n def test_parse_line(self):\n line = ['1', '2', '3']\n actual = undertest.parse_line(line)\n expected = [1, 2, 3]\n self.assertTrue(all(x == y for x, y in zip(expected, actual)))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nimport crisis.parsers as undertest\nimport datetime\nimport unittest\n\n\nclass TestParsers(unittest.TestCase):\n\n def test_parse_date(self):\n date = '8/5/2013 16:14'\n self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14), undertest.\n parse_date(date))\n\n def test_part_date_short(self):\n date = '8/5/13 16:14'\n self.assertEqual(datetime.datetime(2013, 8, 5, 16, 14), undertest.\n parse_date_short(date))\n\n def test_parse_line(self):\n line = ['1', '2', '3']\n actual = undertest.parse_line(line)\n expected = [1, 2, 3]\n self.assertTrue(all(x == y for x, y in zip(expected, actual)))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "\"\"\"\nTests for parsers.py\n\n@author Kevin Wilson <[email protected]>\n\"\"\"\nimport crisis.parsers as undertest\n\nimport datetime\nimport unittest\n\nclass TestParsers(unittest.TestCase):\n\tdef test_parse_date(self):\n\t\tdate = '8/5/2013 16:14'\n\t\tself.assertEqual(datetime.datetime(2013, 8, 5, 16, 14),\n\t\t\t\t\t\tundertest.parse_date(date))\n\n\tdef test_part_date_short(self):\n\t\tdate = '8/5/13 16:14'\n\t\tself.assertEqual(datetime.datetime(2013, 8, 5, 16, 14),\n\t\t\t\t\t\tundertest.parse_date_short(date))\n\n\tdef test_parse_line(self):\n\t\tline = [\"1\",\"2\",\"3\"]\n\t\tactual = undertest.parse_line(line)\n\t\texpected = [1,2,3]\n\t\tself.assertTrue(all(x == y for x, y in zip(expected, actual)))\n\nif __name__ == '__main__':\n\tunittest.main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
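Editor's note: the record above tests crisis.parsers without including the module itself. A minimal crisis/parsers.py that would satisfy all three tests could look like this — a sketch under the assumption that the real module does nothing more than the tests exercise.

import datetime


def parse_date(date_str):
    # '8/5/2013 16:14' -> datetime.datetime(2013, 8, 5, 16, 14)
    return datetime.datetime.strptime(date_str, '%m/%d/%Y %H:%M')


def parse_date_short(date_str):
    # '8/5/13 16:14' -> datetime.datetime(2013, 8, 5, 16, 14)
    return datetime.datetime.strptime(date_str, '%m/%d/%y %H:%M')


def parse_line(line):
    # ['1', '2', '3'] -> [1, 2, 3]
    return [int(field) for field in line]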
l = input().split("+")
l.sort()
print('+'.join(l))
|
normal
|
{
"blob_id": "30d891c18f3635b7419fa0d0539b2665ad60b22c",
"index": 4748,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nl.sort()\nprint('+'.join(l))\n",
"step-3": "l = input().split('+')\nl.sort()\nprint('+'.join(l))\n",
"step-4": "l = input().split(\"+\")\r\r\nl.sort()\r\r\nprint('+'.join(l))\r\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
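Editor's note: the three-line record above reorders summands by plain string sort, which is numerically correct only when every summand is a single digit (the kind of exercise this snippet appears to target). A hedged variant for multi-digit summands — hypothetical, not part of the original record — would sort with a numeric key:

summands = '10+2+1'.split('+')
summands.sort(key=int)       # numeric order: ['1', '2', '10']
print('+'.join(summands))    # -> 1+2+10 (plain string sort would give 1+10+2)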
from django import forms
from basic_app_new.models import *
class UpdateFood(forms.ModelForm):
class Meta:
model = Old_Food_Diary
fields = ['mfg_code', 'food_name', 'description', 'food_type',
'calories', 'fats', 'protein', 'carbohydrates', 'link_of_image',
'link_of_recipie', 'purchasing_link']
class UpdatePurchaseFood(forms.ModelForm):
class Meta:
model = purchase_cards
fields = ['food_name', 'description', 'ss_code', 'calorie', 'fat',
'protein', 'carbs', 'image_path']
|
normal
|
{
"blob_id": "3a1b0b9891fec7b3d722f77cd2f3f6efa878a7a0",
"index": 4255,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass UpdatePurchaseFood(forms.ModelForm):\n\n\n class Meta:\n model = purchase_cards\n fields = ['food_name', 'description', 'ss_code', 'calorie', 'fat',\n 'protein', 'carbs', 'image_path']\n",
"step-3": "<mask token>\n\n\nclass UpdateFood(forms.ModelForm):\n\n\n class Meta:\n model = Old_Food_Diary\n fields = ['mfg_code', 'food_name', 'description', 'food_type',\n 'calories', 'fats', 'protein', 'carbohydrates', 'link_of_image',\n 'link_of_recipie', 'purchasing_link']\n\n\nclass UpdatePurchaseFood(forms.ModelForm):\n\n\n class Meta:\n model = purchase_cards\n fields = ['food_name', 'description', 'ss_code', 'calorie', 'fat',\n 'protein', 'carbs', 'image_path']\n",
"step-4": "from django import forms\nfrom basic_app_new.models import *\n\n\nclass UpdateFood(forms.ModelForm):\n\n\n class Meta:\n model = Old_Food_Diary\n fields = ['mfg_code', 'food_name', 'description', 'food_type',\n 'calories', 'fats', 'protein', 'carbohydrates', 'link_of_image',\n 'link_of_recipie', 'purchasing_link']\n\n\nclass UpdatePurchaseFood(forms.ModelForm):\n\n\n class Meta:\n model = purchase_cards\n fields = ['food_name', 'description', 'ss_code', 'calorie', 'fat',\n 'protein', 'carbs', 'image_path']\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
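Editor's note: the record above defines two ModelForms but no view. As a sketch of the standard Django update pattern these forms would plug into — the view name, template name, URL name, and forms module path are all assumptions, not part of the original record:

from django.shortcuts import get_object_or_404, redirect, render
from basic_app_new.models import Old_Food_Diary
# from basic_app_new.forms import UpdateFood  # assumed module path


def update_food(request, pk):
    food = get_object_or_404(Old_Food_Diary, pk=pk)
    form = UpdateFood(request.POST or None, instance=food)
    if request.method == 'POST' and form.is_valid():
        form.save()  # persists only the fields listed in Meta.fields
        return redirect('food_list')  # hypothetical URL name
    return render(request, 'update_food.html', {'form': form})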
# Generated by Django 3.2 on 2021-05-03 17:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('orders', '0005_alter_orderitem_price'),
]
operations = [
migrations.AddField(
model_name='order',
name='being_delivered',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='order',
name='payment_id',
field=models.CharField(blank=True, max_length=150),
),
migrations.AddField(
model_name='order',
name='ref_code',
field=models.CharField(blank=True, max_length=20, null=True),
),
]
|
normal
|
{
"blob_id": "f3b466dc5b6149be82b096791ca8445faf169380",
"index": 5216,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('orders', '0005_alter_orderitem_price')]\n operations = [migrations.AddField(model_name='order', name=\n 'being_delivered', field=models.BooleanField(default=False)),\n migrations.AddField(model_name='order', name='payment_id', field=\n models.CharField(blank=True, max_length=150)), migrations.AddField(\n model_name='order', name='ref_code', field=models.CharField(blank=\n True, max_length=20, null=True))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('orders', '0005_alter_orderitem_price')]\n operations = [migrations.AddField(model_name='order', name=\n 'being_delivered', field=models.BooleanField(default=False)),\n migrations.AddField(model_name='order', name='payment_id', field=\n models.CharField(blank=True, max_length=150)), migrations.AddField(\n model_name='order', name='ref_code', field=models.CharField(blank=\n True, max_length=20, null=True))]\n",
"step-5": "# Generated by Django 3.2 on 2021-05-03 17:13\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('orders', '0005_alter_orderitem_price'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='order',\n name='being_delivered',\n field=models.BooleanField(default=False),\n ),\n migrations.AddField(\n model_name='order',\n name='payment_id',\n field=models.CharField(blank=True, max_length=150),\n ),\n migrations.AddField(\n model_name='order',\n name='ref_code',\n field=models.CharField(blank=True, max_length=20, null=True),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
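Editor's note: for context on the migration record above, these are the Order model additions that would generate its three AddField operations. The field definitions are taken verbatim from the migration; the rest of the model is assumed.

from django.db import models


class Order(models.Model):
    # ... fields from migrations 0001-0005 omitted ...
    being_delivered = models.BooleanField(default=False)
    payment_id = models.CharField(blank=True, max_length=150)
    ref_code = models.CharField(blank=True, max_length=20, null=True)

# Applied with the usual workflow:
#   python manage.py makemigrations orders
#   python manage.py migrate orders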
import os
import json
import pytest
from datetime import datetime
from collections import OrderedDict
import numpy as np
import pandas as pd
from sqlalchemy.exc import ProgrammingError
import pawprint
def test_create_table_with_default_options(pawprint_default_tracker_db):
"""Ensure the table is correctly created with the default schema."""
tracker = pawprint_default_tracker_db
    # The table shouldn't exist yet. Assert it's correctly created.
assert tracker.create_table() is None
# Try creating it again. This should raise an error.
with pytest.raises(ProgrammingError):
tracker.create_table()
# Assert the table is empty when created
assert pd.io.sql.execute(
"SELECT COUNT(*) FROM {}".format(tracker.table), tracker.db
).fetchall() == [(0,)]
# Ensure its schema is correct
schema = pd.io.sql.execute(
"SELECT column_name, data_type, character_maximum_length "
"FROM INFORMATION_SCHEMA.COLUMNS "
"WHERE table_name = '{}'".format(tracker.table),
tracker.db,
).fetchall()
expected_schema = [
(u"id", u"integer", None),
(u"timestamp", u"timestamp without time zone", None),
(u"user_id", u"text", None),
(u"event", u"text", None),
(u"metadata", u"jsonb", None),
]
assert schema == expected_schema
def test_drop_table(pawprint_default_tracker_db_with_table):
"""Ensure that tables are deleted successfully."""
tracker = pawprint_default_tracker_db_with_table
# make sure table exists
with pytest.raises(ProgrammingError):
tracker.create_table()
tracker.drop_table()
with pytest.raises(ProgrammingError):
tracker.drop_table()
def test_instantiate_tracker_from_dot_file(drop_tracker_test_table):
"""Test instantiating a Tracker with a dotfile instead of using db and table strings."""
# Write a dotfile to disk
dotfile = {
"db": "postgresql:///little_bean_toes",
"json_field": "such_fuzzy",
}
with open(".pawprint", "w") as f:
json.dump(dotfile, f)
# Create a tracker from this dotfile
tracker = pawprint.Tracker(dotfile=".pawprint", json_field="boop")
# Ensure all the entries are as they should be
assert tracker.db == "postgresql:///little_bean_toes"
assert tracker.table is None
# assert tracker.logger is None
assert tracker.json_field == "boop" # field present in dotfile but overwritten in init
os.remove(".pawprint")
def test_create_table_with_other_options(
drop_tracker_test_table, db_string, tracker_test_table_name
):
"""Ensure the table is correctly created with an alternative schema."""
schema = OrderedDict([("pk", "SERIAL PRIMARY KEY"), ("infofield", "TEXT")])
tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name, schema=schema)
tracker.create_table()
# Ensure its schema is correct
schema = pd.io.sql.execute(
"SELECT column_name, data_type, character_maximum_length "
"FROM INFORMATION_SCHEMA.COLUMNS "
"WHERE table_name = '{}'".format(tracker.table),
tracker.db,
).fetchall()
assert schema == [("pk", "integer", None), ("infofield", "text", None)]
def test_write(drop_tracker_test_table, db_string, tracker_test_table_name):
"""Test the tracking of an event."""
tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name, schema={"id": "INT"})
tracker.create_table()
# Check the table's empty
assert pd.io.sql.execute(
"SELECT COUNT(*) FROM {}".format(tracker.table), tracker.db
).fetchall() == [(0,)]
# Add some data and check if the row count increases by one
tracker.write(id=1337)
assert pd.io.sql.execute(
"SELECT COUNT(*) FROM {}".format(tracker.table), tracker.db
).fetchall() == [(1,)]
# Pull the data and ensure it's correct
data = pd.read_sql("SELECT * FROM {}".format(tracker.table), tracker.db)
assert isinstance(data, pd.DataFrame)
assert len(data.columns) == 1
assert data.columns[0] == "id"
assert data.id[0] == 1337
def test_read(pawprint_default_tracker_db_with_table):
"""Test pulling the data into a dataframe according to various simple filters."""
tracker = pawprint_default_tracker_db_with_table
# Ensure the table is empty to begin with
assert len(tracker.read()) == 0
# Add some data
tracker.write(user_id="Pawprint", event="Testing !")
tracker.write(user_id="Pawprint")
tracker.write(event="No user")
tracker.write(
user_id="import this",
event="very zen",
metadata={
"better": "forgiveness",
"worse": "permission",
"ordered": ["simple", "complex", "complicated"],
},
)
all_data = tracker.read()
pawprint_events = tracker.read(user_id="Pawprint")
id_gt_events = tracker.read(id__gt=10)
id_gte_lt_events = tracker.read(id__gte=1, id__lt=3)
field_events = tracker.read("event", id__lte=100, event="very zen")
contains_events = tracker.read(metadata__contains="better")
not_contains_events = tracker.read(metadata__contains="whisky")
assert len(all_data) == 4
assert len(pawprint_events) == 2
assert len(id_gt_events) == 0
assert len(id_gte_lt_events) == 2
assert len(field_events) == 1
assert len(contains_events) == 1
assert len(not_contains_events) == 0
assert set(all_data.columns) == set(["id", "user_id", "event", "metadata", "timestamp"])
assert set(field_events.columns) == set(["event"])
def test_counts(pawprint_default_tracker_db_with_table):
"""Test counting a specific event, with date ranges and time resolutions."""
tracker = pawprint_default_tracker_db_with_table
# Add a bunch of events
query = (
"""
INSERT INTO {} (timestamp, user_id, event) VALUES
('2016-01-01 12:30', 'alice', 'logged_in'),
('2016-01-01 12:40', 'bob', 'logged_in'),
('2016-01-01 16:00', 'charlotte', 'logged_in'),
('2016-01-02 00:00', 'dan', 'logged_in'),
('2016-01-02 00:00', 'elizabeth', 'logged_in'),
('2016-01-05 00:00', 'frank', 'logged_in'),
('2016-01-10 00:00', 'gabrielle', 'logged_in'),
('2016-01-20 00:00', 'hans', 'logged_in'),
('2016-02-01 00:00', 'iris', 'logged_in'),
('2016-02-01 00:00', 'james', 'logged_in'),
('2016-03-01 00:00', 'kelly', 'logged_in'),
('2016-03-01 00:00', 'laura', 'logged_in'),
('2016-03-01 00:00', 'mike', 'not_logged_in')
"""
).format(tracker.table)
pd.io.sql.execute(query, tracker.db)
logins_hourly = tracker.count(event="logged_in", resolution="hour")
logins_daily = tracker.count(event="logged_in")
logins_weekly = tracker.count(event="logged_in", resolution="week")
logins_monthly = tracker.count(event="logged_in", resolution="month")
logins_weekly_left_range = tracker.count(
event="logged_in", resolution="week", start=datetime(2016, 2, 1)
)
logins_weekly_right_range = tracker.count(
event="logged_in", resolution="week", end=datetime(2016, 2, 1)
)
logins_daily_full_range = tracker.count(
event="logged_in", start=datetime(2016, 1, 15), end=datetime(2016, 2, 15)
)
# Hourly
assert len(logins_hourly) == 8
assert np.all(logins_hourly["count"].values == [2, 1, 2, 1, 1, 1, 2, 2])
# Daily
assert len(logins_daily) == 7
assert np.all(logins_daily["count"].values == [3, 2, 1, 1, 1, 2, 2])
# Weekly
assert len(logins_weekly) == 5
assert np.all(logins_weekly["count"].values == [5, 2, 1, 2, 2])
# Others
assert len(logins_monthly) == 3
assert len(logins_weekly_left_range) == 2 # weeks start on Monday
assert len(logins_weekly_right_range) == 4 # and not at the start / end dates provided
assert len(logins_daily_full_range) == 2
def test_sum_and_average(pawprint_default_tracker_db_with_table):
"""Test aggregating a specific event, with date ranges and time resolutions."""
tracker = pawprint_default_tracker_db_with_table
metadata = str('{"val": 1}').replace("'", '"')
# Add a bunch of events
query = (
"""
INSERT INTO {table} (timestamp, user_id, event, metadata) VALUES
('2016-01-01 12:30', 'alice', 'logged_in', '{metadata}'),
('2016-01-01 12:40', 'bob', 'logged_in', '{metadata}'),
('2016-01-01 16:00', 'charlotte', 'logged_in', '{metadata}'),
('2016-01-02 00:00', 'dan', 'logged_in', '{metadata}'),
('2016-01-02 00:00', 'elizabeth', 'logged_in', '{metadata}'),
('2016-01-05 00:00', 'frank', 'logged_in', '{metadata}'),
('2016-01-10 00:00', 'gabrielle', 'logged_in', '{metadata}'),
('2016-01-20 00:00', 'hans', 'logged_in', '{metadata}'),
('2016-02-01 00:00', 'iris', 'logged_in', '{metadata}'),
('2016-02-01 00:00', 'james', 'logged_in', '{metadata}'),
('2016-03-01 00:00', 'kelly', 'logged_in', '{metadata}'),
('2016-03-01 00:00', 'laura', 'logged_in', '{metadata}'),
('2016-03-01 00:00', 'mike', 'not_logged_in', '{metadata}')
"""
).format(table=tracker.table, metadata=metadata)
pd.io.sql.execute(query, tracker.db)
x_sum_daily_all = tracker.sum("metadata__val")
x_sum_daily = tracker.sum("metadata__val", event="logged_in")
x_avg_daily_all = tracker.average("metadata__val", event="logged_in")
x_avg_daily = tracker.average("metadata__val", event="logged_in")
assert len(x_sum_daily) == 7
assert np.all(x_sum_daily_all["sum"].values == [3, 2, 1, 1, 1, 2, 3])
assert np.all(x_sum_daily["sum"].values == [3, 2, 1, 1, 1, 2, 2])
assert np.all(x_avg_daily_all["avg"].values == [1, 1, 1, 1, 1, 1, 1])
assert np.all(x_avg_daily["avg"] == x_avg_daily_all["avg"])
def test_parse_fields(pawprint_default_tracker_db):
"""Test args passed to read() and _aggregate() are parsed correctly."""
tracker = pawprint_default_tracker_db
# SELECT * FROM table
args = ()
assert tracker._parse_fields(*args) == "*"
# SELECT event FROM table
args = ("event",)
assert tracker._parse_fields(*args) == "event"
# SELECT user_id, timestamp FROM table
args = ("user_id", "timestamp")
assert tracker._parse_fields(*args) == "user_id, timestamp"
    # SELECT metadata #> '{a, b}' FROM table
args = ("metadata__a__b",)
assert tracker._parse_fields(*args) == "metadata #> '{a, b}' AS json_field"
def test_parse_values(pawprint_default_tracker_db):
"""Test parsing values for write()."""
tracker = pawprint_default_tracker_db
# INSERT INTO table (event) VALUES ('logged_in')
args = ("logged_in",)
assert tracker._parse_values(*args) == "'logged_in'"
# INSERT INTO table (event, user_id) VALUES ('logged_in', 'hannah')
args = ("logged_in", "hannah")
assert tracker._parse_values(*args) == "'logged_in', 'hannah'"
def test_parse_conditionals(pawprint_default_tracker_db):
"""Test kwargs passed to read() and _aggregate() are parsed correctly."""
tracker = pawprint_default_tracker_db
# SELECT * FROM table
kwargs = {}
assert tracker._parse_conditionals(**kwargs) == ""
# SELECT * FROM table WHERE user_id = 'Quentin'
kwargs = {"user_id": "Quentin"}
assert tracker._parse_conditionals(**kwargs) == "WHERE user_id = 'Quentin'"
# SELECT * FROM table WHERE event = 'logged_in' AND user_id = 'Quentin'
kwargs = {"event": "logged_in", "user_id": "Quentin"}
assert tracker._parse_conditionals(**kwargs) in (
"WHERE event = 'logged_in' AND user_id = 'Quentin'",
"WHERE user_id = 'Quentin' AND event = 'logged_in'",
)
# SELECT * FROM table WHERE event IN ('logged_in', 'logged_out')
kwargs = {"event__in": ["logged_in", "logged_out"]}
assert tracker._parse_conditionals(**kwargs) == "WHERE event IN ('logged_in', 'logged_out')"
def test_accessing_json_fields(pawprint_default_tracker_db_with_table):
"""Test some structured data pulling."""
tracker = pawprint_default_tracker_db_with_table
# JSON objects in our tracking database
simple = {"integral": "derivative"}
medium = {"montecarlo": {"prior": "likelihood"}}
difficult = {
"deepnet": ["mlp", "cnn", "rnn"],
"ensembles": {"random": "forest", "always": {"cross_validate": ["kfold", "stratified"]}},
}
tracker.write(event="maths", metadata=simple)
tracker.write(event="stats", metadata=medium)
tracker.write(event="ml", metadata=difficult)
maths_all = tracker.read("metadata__integral")
maths_condition = tracker.read("metadata__integral", event="maths")
assert len(maths_all) == 3
assert len(maths_condition) == 1
assert list(maths_all.json_field) == ["derivative", None, None]
stats = tracker.read("metadata__montecarlo__prior").dropna()
assert len(stats) == 1
assert stats.json_field.iloc[0] == "likelihood"
types_of_nn = tracker.read("metadata__deepnet").dropna()
best_nn = tracker.read("metadata__deepnet__1").dropna()
full_depth = tracker.read("metadata__ensembles__always__cross_validate__0").dropna()
assert len(types_of_nn) == 1
assert len(best_nn) == 1
assert best_nn.json_field.iloc[0] == "cnn"
assert len(full_depth) == 1
assert full_depth.json_field.iloc[0] == "kfold"
def test_json_maths(pawprint_default_tracker_db_with_table):
"""More advanced operations on JSON subfields."""
tracker = pawprint_default_tracker_db_with_table
tracker.write(event="whisky", metadata={"uigeadail": {"value": 123, "lagavulin": [4, 2]}})
tracker.write(event="whisky", metadata={"uigeadail": {"value": 456, "lagavulin": [5, 0]}})
tracker.write(event="whisky", metadata={"uigeadail": {"value": 758, "lagavulin": [7, 10]}})
tracker.write(event="armagnac", metadata={"age": "XO"})
tracker.write(event="armagnac", metadata={"age": 15})
assert len(tracker.read()) == 5
assert len(tracker.read(metadata__uigeadail__contains="lagavulin")) == 3
assert len(tracker.read(metadata__uigeadail__value__gt=123)) == 2
assert len(tracker.read(metadata__uigeadail__value__gte=123)) == 3
whiskies = tracker.sum("metadata__uigeadail__value")
assert len(whiskies) == 1
assert whiskies.iloc[0]["sum"] == 1337
assert len(tracker.read(metadata__contains="age")) == 2
assert len(tracker.read(metadata__age="XO")) == 1
def test_silent_write_errors():
"""When a failure occurs in event write, it should fail silently."""
tracker = pawprint.Tracker(db=None, table=None)
try:
tracker.write(event="This will fail silently.")
except Exception:
pytest.fail("Failed to fail silently.")
def test_nonsilent_write_errors(error_logger):
"""Test non-silent write errors that should output to the logger or raise exceptions."""
tracker = pawprint.Tracker(db="postgresql:///fail", logger=error_logger)
with pytest.raises(Exception):
tracker.write()
with pytest.raises(Exception):
tracker.write(event="going_to_fail")
with open("pawprint.log", mode="r") as f:
logs = f.readlines()
print(logs[3])
assert len(logs) == 6
assert logs[0].startswith("pawprint: pawprint failed to write.")
assert "Table: None. Query: INSERT INTO None () VALUES ();" in logs[0]
assert "Query: INSERT INTO None (event) VALUES ('going_to_fail')" in logs[3]
os.remove("pawprint.log")
def test_auto_timestamp(db_string):
"""Ensure that timestamps are autopopulated correctly if not passed."""
# Define a schema where the timestamp doesn't automatically populate through the database
schema = {"event": "TEXT", "timestamp": "TIMESTAMP"}
# Put together two trackers, one that autopopulates the timestamp
no_auto = pawprint.Tracker(db=db_string, table="no_auto", auto_timestamp=False, schema=schema)
auto = pawprint.Tracker(db=db_string, table="auto", auto_timestamp=True, schema=schema)
# Create clean tables
no_auto.create_table()
auto.create_table()
# Write events with no timestamp
no_auto.write(event="foo")
auto.write(event="bar")
assert len(no_auto.read()) == 1
assert len(auto.read()) == 1
assert len(no_auto.read().dropna()) == 0
assert len(auto.read().dropna()) == 1
# Drop tables at the end
no_auto.drop_table()
auto.drop_table()
def test_repr_and_str(pawprint_default_tracker_db):
"""Test the __repr__ and __str__."""
tracker = pawprint_default_tracker_db
expected_repr = "pawprint.Tracker on table '{}' and database '{}'".format(
tracker.table, tracker.db
)
expected_str = "pawprint Tracker object.\ndb : {}\ntable : {}".format(tracker.db, tracker.table)
assert tracker.__repr__() == expected_repr
assert tracker.__str__() == expected_str
def test_malicious_strings(pawprint_default_tracker_db_with_table):
"""Test that SQL injection strings are sanitized"""
tracker = pawprint_default_tracker_db_with_table
tracker.write(
event="armageddon",
metadata={
"shady business": {
"with": "the following string",
"of sql": "50');INSERT INTO {table} (event, user_id) VALUES "
"('you got pwnd', '50".format(table=tracker.table),
}
},
)
assert len(tracker.read()) == 1
tracker.write(
event="armageddon",
metadata={
"more shady business": {
"my shady sql": "' OR '1'='1;DROP TABLE {table};".format(table=tracker.table)
}
},
)
assert len(tracker.read()) == 2
tracker.write(
event="' OR '1'='1;",
metadata={"foo": "x'); DROP TABLE {table}; --".format(table=tracker.table)},
)
assert len(tracker.read()) == 3
def test_escaping_from_quotes(pawprint_default_tracker_db_with_table):
tracker = pawprint_default_tracker_db_with_table
tracker.write(
event="known crummy string",
metadata={
"foo": {
"toState": "#/app/dealnotes/2345/FORPETE'S_SAKE,_LLC_Tenant_Rep_Lease_2",
"fromState": "#/app/dealdetails/2345",
"platform": "iOS App",
}
},
)
assert len(tracker.read()) == 1
|
normal
|
{
"blob_id": "89a75ae980b7b48d33d0e8aa53ec92296dbfbc8e",
"index": 2843,
"step-1": "<mask token>\n\n\ndef test_create_table_with_default_options(pawprint_default_tracker_db):\n \"\"\"Ensure the table is correctly created with the default schema.\"\"\"\n tracker = pawprint_default_tracker_db\n assert tracker.create_table() is None\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n assert pd.io.sql.execute('SELECT COUNT(*) FROM {}'.format(tracker.table\n ), tracker.db).fetchall() == [(0,)]\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n expected_schema = [(u'id', u'integer', None), (u'timestamp',\n u'timestamp without time zone', None), (u'user_id', u'text', None),\n (u'event', u'text', None), (u'metadata', u'jsonb', None)]\n assert schema == expected_schema\n\n\ndef test_drop_table(pawprint_default_tracker_db_with_table):\n \"\"\"Ensure that tables are deleted successfully.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n tracker.drop_table()\n with pytest.raises(ProgrammingError):\n tracker.drop_table()\n\n\n<mask token>\n\n\ndef test_create_table_with_other_options(drop_tracker_test_table, db_string,\n tracker_test_table_name):\n \"\"\"Ensure the table is correctly created with an alternative schema.\"\"\"\n schema = OrderedDict([('pk', 'SERIAL PRIMARY KEY'), ('infofield', 'TEXT')])\n tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name,\n schema=schema)\n tracker.create_table()\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n assert schema == [('pk', 'integer', None), ('infofield', 'text', None)]\n\n\n<mask token>\n\n\ndef test_counts(pawprint_default_tracker_db_with_table):\n \"\"\"Test counting a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n query = (\n \"\"\"\n INSERT INTO {} (timestamp, user_id, event) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in'),\n ('2016-01-01 12:40', 'bob', 'logged_in'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in'),\n ('2016-01-02 00:00', 'dan', 'logged_in'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in'),\n ('2016-01-05 00:00', 'frank', 'logged_in'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in'),\n ('2016-01-20 00:00', 'hans', 'logged_in'),\n ('2016-02-01 00:00', 'iris', 'logged_in'),\n ('2016-02-01 00:00', 'james', 'logged_in'),\n ('2016-03-01 00:00', 'kelly', 'logged_in'),\n ('2016-03-01 00:00', 'laura', 'logged_in'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in')\n \"\"\"\n .format(tracker.table))\n pd.io.sql.execute(query, tracker.db)\n logins_hourly = tracker.count(event='logged_in', resolution='hour')\n logins_daily = tracker.count(event='logged_in')\n logins_weekly = tracker.count(event='logged_in', resolution='week')\n logins_monthly = tracker.count(event='logged_in', resolution='month')\n logins_weekly_left_range = tracker.count(event='logged_in', resolution=\n 'week', start=datetime(2016, 2, 1))\n logins_weekly_right_range = tracker.count(event='logged_in', resolution\n ='week', end=datetime(2016, 2, 1))\n logins_daily_full_range = tracker.count(event='logged_in', start=\n datetime(2016, 1, 15), end=datetime(2016, 2, 15))\n assert len(logins_hourly) == 8\n assert np.all(logins_hourly['count'].values == [2, 1, 2, 1, 1, 1, 2, 2])\n assert 
len(logins_daily) == 7\n assert np.all(logins_daily['count'].values == [3, 2, 1, 1, 1, 2, 2])\n assert len(logins_weekly) == 5\n assert np.all(logins_weekly['count'].values == [5, 2, 1, 2, 2])\n assert len(logins_monthly) == 3\n assert len(logins_weekly_left_range) == 2\n assert len(logins_weekly_right_range) == 4\n assert len(logins_daily_full_range) == 2\n\n\ndef test_sum_and_average(pawprint_default_tracker_db_with_table):\n \"\"\"Test aggregating a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n metadata = str('{\"val\": 1}').replace(\"'\", '\"')\n query = (\n \"\"\"\n INSERT INTO {table} (timestamp, user_id, event, metadata) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in', '{metadata}'),\n ('2016-01-01 12:40', 'bob', 'logged_in', '{metadata}'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'dan', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in', '{metadata}'),\n ('2016-01-05 00:00', 'frank', 'logged_in', '{metadata}'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in', '{metadata}'),\n ('2016-01-20 00:00', 'hans', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'iris', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'james', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'kelly', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'laura', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in', '{metadata}')\n \"\"\"\n .format(table=tracker.table, metadata=metadata))\n pd.io.sql.execute(query, tracker.db)\n x_sum_daily_all = tracker.sum('metadata__val')\n x_sum_daily = tracker.sum('metadata__val', event='logged_in')\n x_avg_daily_all = tracker.average('metadata__val', event='logged_in')\n x_avg_daily = tracker.average('metadata__val', event='logged_in')\n assert len(x_sum_daily) == 7\n assert np.all(x_sum_daily_all['sum'].values == [3, 2, 1, 1, 1, 2, 3])\n assert np.all(x_sum_daily['sum'].values == [3, 2, 1, 1, 1, 2, 2])\n assert np.all(x_avg_daily_all['avg'].values == [1, 1, 1, 1, 1, 1, 1])\n assert np.all(x_avg_daily['avg'] == x_avg_daily_all['avg'])\n\n\n<mask token>\n\n\ndef test_silent_write_errors():\n \"\"\"When a failure occurs in event write, it should fail silently.\"\"\"\n tracker = pawprint.Tracker(db=None, table=None)\n try:\n tracker.write(event='This will fail silently.')\n except Exception:\n pytest.fail('Failed to fail silently.')\n\n\n<mask token>\n\n\ndef test_malicious_strings(pawprint_default_tracker_db_with_table):\n \"\"\"Test that SQL injection strings are sanitized\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='armageddon', metadata={'shady business': {'with':\n 'the following string', 'of sql':\n \"50');INSERT INTO {table} (event, user_id) VALUES ('you got pwnd', '50\"\n .format(table=tracker.table)}})\n assert len(tracker.read()) == 1\n tracker.write(event='armageddon', metadata={'more shady business': {\n 'my shady sql': \"' OR '1'='1;DROP TABLE {table};\".format(table=\n tracker.table)}})\n assert len(tracker.read()) == 2\n tracker.write(event=\"' OR '1'='1;\", metadata={'foo':\n \"x'); DROP TABLE {table}; --\".format(table=tracker.table)})\n assert len(tracker.read()) == 3\n\n\ndef test_escaping_from_quotes(pawprint_default_tracker_db_with_table):\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='known crummy string', metadata={'foo': {'toState':\n \"#/app/dealnotes/2345/FORPETE'S_SAKE,_LLC_Tenant_Rep_Lease_2\",\n 'fromState': 
'#/app/dealdetails/2345', 'platform': 'iOS App'}})\n assert len(tracker.read()) == 1\n",
"step-2": "<mask token>\n\n\ndef test_create_table_with_default_options(pawprint_default_tracker_db):\n \"\"\"Ensure the table is correctly created with the default schema.\"\"\"\n tracker = pawprint_default_tracker_db\n assert tracker.create_table() is None\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n assert pd.io.sql.execute('SELECT COUNT(*) FROM {}'.format(tracker.table\n ), tracker.db).fetchall() == [(0,)]\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n expected_schema = [(u'id', u'integer', None), (u'timestamp',\n u'timestamp without time zone', None), (u'user_id', u'text', None),\n (u'event', u'text', None), (u'metadata', u'jsonb', None)]\n assert schema == expected_schema\n\n\ndef test_drop_table(pawprint_default_tracker_db_with_table):\n \"\"\"Ensure that tables are deleted successfully.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n tracker.drop_table()\n with pytest.raises(ProgrammingError):\n tracker.drop_table()\n\n\n<mask token>\n\n\ndef test_create_table_with_other_options(drop_tracker_test_table, db_string,\n tracker_test_table_name):\n \"\"\"Ensure the table is correctly created with an alternative schema.\"\"\"\n schema = OrderedDict([('pk', 'SERIAL PRIMARY KEY'), ('infofield', 'TEXT')])\n tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name,\n schema=schema)\n tracker.create_table()\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n assert schema == [('pk', 'integer', None), ('infofield', 'text', None)]\n\n\n<mask token>\n\n\ndef test_counts(pawprint_default_tracker_db_with_table):\n \"\"\"Test counting a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n query = (\n \"\"\"\n INSERT INTO {} (timestamp, user_id, event) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in'),\n ('2016-01-01 12:40', 'bob', 'logged_in'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in'),\n ('2016-01-02 00:00', 'dan', 'logged_in'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in'),\n ('2016-01-05 00:00', 'frank', 'logged_in'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in'),\n ('2016-01-20 00:00', 'hans', 'logged_in'),\n ('2016-02-01 00:00', 'iris', 'logged_in'),\n ('2016-02-01 00:00', 'james', 'logged_in'),\n ('2016-03-01 00:00', 'kelly', 'logged_in'),\n ('2016-03-01 00:00', 'laura', 'logged_in'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in')\n \"\"\"\n .format(tracker.table))\n pd.io.sql.execute(query, tracker.db)\n logins_hourly = tracker.count(event='logged_in', resolution='hour')\n logins_daily = tracker.count(event='logged_in')\n logins_weekly = tracker.count(event='logged_in', resolution='week')\n logins_monthly = tracker.count(event='logged_in', resolution='month')\n logins_weekly_left_range = tracker.count(event='logged_in', resolution=\n 'week', start=datetime(2016, 2, 1))\n logins_weekly_right_range = tracker.count(event='logged_in', resolution\n ='week', end=datetime(2016, 2, 1))\n logins_daily_full_range = tracker.count(event='logged_in', start=\n datetime(2016, 1, 15), end=datetime(2016, 2, 15))\n assert len(logins_hourly) == 8\n assert np.all(logins_hourly['count'].values == [2, 1, 2, 1, 1, 1, 2, 2])\n assert 
len(logins_daily) == 7\n assert np.all(logins_daily['count'].values == [3, 2, 1, 1, 1, 2, 2])\n assert len(logins_weekly) == 5\n assert np.all(logins_weekly['count'].values == [5, 2, 1, 2, 2])\n assert len(logins_monthly) == 3\n assert len(logins_weekly_left_range) == 2\n assert len(logins_weekly_right_range) == 4\n assert len(logins_daily_full_range) == 2\n\n\ndef test_sum_and_average(pawprint_default_tracker_db_with_table):\n \"\"\"Test aggregating a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n metadata = str('{\"val\": 1}').replace(\"'\", '\"')\n query = (\n \"\"\"\n INSERT INTO {table} (timestamp, user_id, event, metadata) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in', '{metadata}'),\n ('2016-01-01 12:40', 'bob', 'logged_in', '{metadata}'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'dan', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in', '{metadata}'),\n ('2016-01-05 00:00', 'frank', 'logged_in', '{metadata}'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in', '{metadata}'),\n ('2016-01-20 00:00', 'hans', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'iris', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'james', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'kelly', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'laura', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in', '{metadata}')\n \"\"\"\n .format(table=tracker.table, metadata=metadata))\n pd.io.sql.execute(query, tracker.db)\n x_sum_daily_all = tracker.sum('metadata__val')\n x_sum_daily = tracker.sum('metadata__val', event='logged_in')\n x_avg_daily_all = tracker.average('metadata__val', event='logged_in')\n x_avg_daily = tracker.average('metadata__val', event='logged_in')\n assert len(x_sum_daily) == 7\n assert np.all(x_sum_daily_all['sum'].values == [3, 2, 1, 1, 1, 2, 3])\n assert np.all(x_sum_daily['sum'].values == [3, 2, 1, 1, 1, 2, 2])\n assert np.all(x_avg_daily_all['avg'].values == [1, 1, 1, 1, 1, 1, 1])\n assert np.all(x_avg_daily['avg'] == x_avg_daily_all['avg'])\n\n\ndef test_parse_fields(pawprint_default_tracker_db):\n \"\"\"Test args passed to read() and _aggregate() are parsed correctly.\"\"\"\n tracker = pawprint_default_tracker_db\n args = ()\n assert tracker._parse_fields(*args) == '*'\n args = 'event',\n assert tracker._parse_fields(*args) == 'event'\n args = 'user_id', 'timestamp'\n assert tracker._parse_fields(*args) == 'user_id, timestamp'\n args = 'metadata__a__b',\n assert tracker._parse_fields(*args) == \"metadata #> '{a, b}' AS json_field\"\n\n\ndef test_parse_values(pawprint_default_tracker_db):\n \"\"\"Test parsing values for write().\"\"\"\n tracker = pawprint_default_tracker_db\n args = 'logged_in',\n assert tracker._parse_values(*args) == \"'logged_in'\"\n args = 'logged_in', 'hannah'\n assert tracker._parse_values(*args) == \"'logged_in', 'hannah'\"\n\n\ndef test_parse_conditionals(pawprint_default_tracker_db):\n \"\"\"Test kwargs passed to read() and _aggregate() are parsed correctly.\"\"\"\n tracker = pawprint_default_tracker_db\n kwargs = {}\n assert tracker._parse_conditionals(**kwargs) == ''\n kwargs = {'user_id': 'Quentin'}\n assert tracker._parse_conditionals(**kwargs) == \"WHERE user_id = 'Quentin'\"\n kwargs = {'event': 'logged_in', 'user_id': 'Quentin'}\n assert tracker._parse_conditionals(**kwargs) in (\n \"WHERE event = 'logged_in' AND user_id = 'Quentin'\",\n \"WHERE user_id = 'Quentin' AND event = 
'logged_in'\")\n kwargs = {'event__in': ['logged_in', 'logged_out']}\n assert tracker._parse_conditionals(**kwargs\n ) == \"WHERE event IN ('logged_in', 'logged_out')\"\n\n\ndef test_accessing_json_fields(pawprint_default_tracker_db_with_table):\n \"\"\"Test some structured data pulling.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n simple = {'integral': 'derivative'}\n medium = {'montecarlo': {'prior': 'likelihood'}}\n difficult = {'deepnet': ['mlp', 'cnn', 'rnn'], 'ensembles': {'random':\n 'forest', 'always': {'cross_validate': ['kfold', 'stratified']}}}\n tracker.write(event='maths', metadata=simple)\n tracker.write(event='stats', metadata=medium)\n tracker.write(event='ml', metadata=difficult)\n maths_all = tracker.read('metadata__integral')\n maths_condition = tracker.read('metadata__integral', event='maths')\n assert len(maths_all) == 3\n assert len(maths_condition) == 1\n assert list(maths_all.json_field) == ['derivative', None, None]\n stats = tracker.read('metadata__montecarlo__prior').dropna()\n assert len(stats) == 1\n assert stats.json_field.iloc[0] == 'likelihood'\n types_of_nn = tracker.read('metadata__deepnet').dropna()\n best_nn = tracker.read('metadata__deepnet__1').dropna()\n full_depth = tracker.read('metadata__ensembles__always__cross_validate__0'\n ).dropna()\n assert len(types_of_nn) == 1\n assert len(best_nn) == 1\n assert best_nn.json_field.iloc[0] == 'cnn'\n assert len(full_depth) == 1\n assert full_depth.json_field.iloc[0] == 'kfold'\n\n\ndef test_json_maths(pawprint_default_tracker_db_with_table):\n \"\"\"More advanced operations on JSON subfields.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 123,\n 'lagavulin': [4, 2]}})\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 456,\n 'lagavulin': [5, 0]}})\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 758,\n 'lagavulin': [7, 10]}})\n tracker.write(event='armagnac', metadata={'age': 'XO'})\n tracker.write(event='armagnac', metadata={'age': 15})\n assert len(tracker.read()) == 5\n assert len(tracker.read(metadata__uigeadail__contains='lagavulin')) == 3\n assert len(tracker.read(metadata__uigeadail__value__gt=123)) == 2\n assert len(tracker.read(metadata__uigeadail__value__gte=123)) == 3\n whiskies = tracker.sum('metadata__uigeadail__value')\n assert len(whiskies) == 1\n assert whiskies.iloc[0]['sum'] == 1337\n assert len(tracker.read(metadata__contains='age')) == 2\n assert len(tracker.read(metadata__age='XO')) == 1\n\n\ndef test_silent_write_errors():\n \"\"\"When a failure occurs in event write, it should fail silently.\"\"\"\n tracker = pawprint.Tracker(db=None, table=None)\n try:\n tracker.write(event='This will fail silently.')\n except Exception:\n pytest.fail('Failed to fail silently.')\n\n\ndef test_nonsilent_write_errors(error_logger):\n \"\"\"Test non-silent write errors that should output to the logger or raise exceptions.\"\"\"\n tracker = pawprint.Tracker(db='postgresql:///fail', logger=error_logger)\n with pytest.raises(Exception):\n tracker.write()\n with pytest.raises(Exception):\n tracker.write(event='going_to_fail')\n with open('pawprint.log', mode='r') as f:\n logs = f.readlines()\n print(logs[3])\n assert len(logs) == 6\n assert logs[0].startswith('pawprint: pawprint failed to write.')\n assert 'Table: None. 
Query: INSERT INTO None () VALUES ();' in logs[0]\n assert \"Query: INSERT INTO None (event) VALUES ('going_to_fail')\" in logs[3\n ]\n os.remove('pawprint.log')\n\n\n<mask token>\n\n\ndef test_repr_and_str(pawprint_default_tracker_db):\n \"\"\"Test the __repr__ and __str__.\"\"\"\n tracker = pawprint_default_tracker_db\n expected_repr = \"pawprint.Tracker on table '{}' and database '{}'\".format(\n tracker.table, tracker.db)\n expected_str = 'pawprint Tracker object.\\ndb : {}\\ntable : {}'.format(\n tracker.db, tracker.table)\n assert tracker.__repr__() == expected_repr\n assert tracker.__str__() == expected_str\n\n\ndef test_malicious_strings(pawprint_default_tracker_db_with_table):\n \"\"\"Test that SQL injection strings are sanitized\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='armageddon', metadata={'shady business': {'with':\n 'the following string', 'of sql':\n \"50');INSERT INTO {table} (event, user_id) VALUES ('you got pwnd', '50\"\n .format(table=tracker.table)}})\n assert len(tracker.read()) == 1\n tracker.write(event='armageddon', metadata={'more shady business': {\n 'my shady sql': \"' OR '1'='1;DROP TABLE {table};\".format(table=\n tracker.table)}})\n assert len(tracker.read()) == 2\n tracker.write(event=\"' OR '1'='1;\", metadata={'foo':\n \"x'); DROP TABLE {table}; --\".format(table=tracker.table)})\n assert len(tracker.read()) == 3\n\n\ndef test_escaping_from_quotes(pawprint_default_tracker_db_with_table):\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='known crummy string', metadata={'foo': {'toState':\n \"#/app/dealnotes/2345/FORPETE'S_SAKE,_LLC_Tenant_Rep_Lease_2\",\n 'fromState': '#/app/dealdetails/2345', 'platform': 'iOS App'}})\n assert len(tracker.read()) == 1\n",
"step-3": "<mask token>\n\n\ndef test_create_table_with_default_options(pawprint_default_tracker_db):\n \"\"\"Ensure the table is correctly created with the default schema.\"\"\"\n tracker = pawprint_default_tracker_db\n assert tracker.create_table() is None\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n assert pd.io.sql.execute('SELECT COUNT(*) FROM {}'.format(tracker.table\n ), tracker.db).fetchall() == [(0,)]\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n expected_schema = [(u'id', u'integer', None), (u'timestamp',\n u'timestamp without time zone', None), (u'user_id', u'text', None),\n (u'event', u'text', None), (u'metadata', u'jsonb', None)]\n assert schema == expected_schema\n\n\ndef test_drop_table(pawprint_default_tracker_db_with_table):\n \"\"\"Ensure that tables are deleted successfully.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n tracker.drop_table()\n with pytest.raises(ProgrammingError):\n tracker.drop_table()\n\n\ndef test_instantiate_tracker_from_dot_file(drop_tracker_test_table):\n \"\"\"Test instantiating a Tracker with a dotfile instead of using db and table strings.\"\"\"\n dotfile = {'db': 'postgresql:///little_bean_toes', 'json_field':\n 'such_fuzzy'}\n with open('.pawprint', 'w') as f:\n json.dump(dotfile, f)\n tracker = pawprint.Tracker(dotfile='.pawprint', json_field='boop')\n assert tracker.db == 'postgresql:///little_bean_toes'\n assert tracker.table is None\n assert tracker.json_field == 'boop'\n os.remove('.pawprint')\n\n\ndef test_create_table_with_other_options(drop_tracker_test_table, db_string,\n tracker_test_table_name):\n \"\"\"Ensure the table is correctly created with an alternative schema.\"\"\"\n schema = OrderedDict([('pk', 'SERIAL PRIMARY KEY'), ('infofield', 'TEXT')])\n tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name,\n schema=schema)\n tracker.create_table()\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n assert schema == [('pk', 'integer', None), ('infofield', 'text', None)]\n\n\n<mask token>\n\n\ndef test_read(pawprint_default_tracker_db_with_table):\n \"\"\"Test pulling the data into a dataframe according to various simple filters.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n assert len(tracker.read()) == 0\n tracker.write(user_id='Pawprint', event='Testing !')\n tracker.write(user_id='Pawprint')\n tracker.write(event='No user')\n tracker.write(user_id='import this', event='very zen', metadata={\n 'better': 'forgiveness', 'worse': 'permission', 'ordered': [\n 'simple', 'complex', 'complicated']})\n all_data = tracker.read()\n pawprint_events = tracker.read(user_id='Pawprint')\n id_gt_events = tracker.read(id__gt=10)\n id_gte_lt_events = tracker.read(id__gte=1, id__lt=3)\n field_events = tracker.read('event', id__lte=100, event='very zen')\n contains_events = tracker.read(metadata__contains='better')\n not_contains_events = tracker.read(metadata__contains='whisky')\n assert len(all_data) == 4\n assert len(pawprint_events) == 2\n assert len(id_gt_events) == 0\n assert len(id_gte_lt_events) == 2\n assert len(field_events) == 1\n assert len(contains_events) == 1\n assert len(not_contains_events) == 0\n assert 
set(all_data.columns) == set(['id', 'user_id', 'event',\n 'metadata', 'timestamp'])\n assert set(field_events.columns) == set(['event'])\n\n\ndef test_counts(pawprint_default_tracker_db_with_table):\n \"\"\"Test counting a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n query = (\n \"\"\"\n INSERT INTO {} (timestamp, user_id, event) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in'),\n ('2016-01-01 12:40', 'bob', 'logged_in'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in'),\n ('2016-01-02 00:00', 'dan', 'logged_in'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in'),\n ('2016-01-05 00:00', 'frank', 'logged_in'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in'),\n ('2016-01-20 00:00', 'hans', 'logged_in'),\n ('2016-02-01 00:00', 'iris', 'logged_in'),\n ('2016-02-01 00:00', 'james', 'logged_in'),\n ('2016-03-01 00:00', 'kelly', 'logged_in'),\n ('2016-03-01 00:00', 'laura', 'logged_in'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in')\n \"\"\"\n .format(tracker.table))\n pd.io.sql.execute(query, tracker.db)\n logins_hourly = tracker.count(event='logged_in', resolution='hour')\n logins_daily = tracker.count(event='logged_in')\n logins_weekly = tracker.count(event='logged_in', resolution='week')\n logins_monthly = tracker.count(event='logged_in', resolution='month')\n logins_weekly_left_range = tracker.count(event='logged_in', resolution=\n 'week', start=datetime(2016, 2, 1))\n logins_weekly_right_range = tracker.count(event='logged_in', resolution\n ='week', end=datetime(2016, 2, 1))\n logins_daily_full_range = tracker.count(event='logged_in', start=\n datetime(2016, 1, 15), end=datetime(2016, 2, 15))\n assert len(logins_hourly) == 8\n assert np.all(logins_hourly['count'].values == [2, 1, 2, 1, 1, 1, 2, 2])\n assert len(logins_daily) == 7\n assert np.all(logins_daily['count'].values == [3, 2, 1, 1, 1, 2, 2])\n assert len(logins_weekly) == 5\n assert np.all(logins_weekly['count'].values == [5, 2, 1, 2, 2])\n assert len(logins_monthly) == 3\n assert len(logins_weekly_left_range) == 2\n assert len(logins_weekly_right_range) == 4\n assert len(logins_daily_full_range) == 2\n\n\ndef test_sum_and_average(pawprint_default_tracker_db_with_table):\n \"\"\"Test aggregating a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n metadata = str('{\"val\": 1}').replace(\"'\", '\"')\n query = (\n \"\"\"\n INSERT INTO {table} (timestamp, user_id, event, metadata) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in', '{metadata}'),\n ('2016-01-01 12:40', 'bob', 'logged_in', '{metadata}'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'dan', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in', '{metadata}'),\n ('2016-01-05 00:00', 'frank', 'logged_in', '{metadata}'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in', '{metadata}'),\n ('2016-01-20 00:00', 'hans', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'iris', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'james', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'kelly', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'laura', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in', '{metadata}')\n \"\"\"\n .format(table=tracker.table, metadata=metadata))\n pd.io.sql.execute(query, tracker.db)\n x_sum_daily_all = tracker.sum('metadata__val')\n x_sum_daily = tracker.sum('metadata__val', event='logged_in')\n x_avg_daily_all = 
tracker.average('metadata__val', event='logged_in')\n x_avg_daily = tracker.average('metadata__val', event='logged_in')\n assert len(x_sum_daily) == 7\n assert np.all(x_sum_daily_all['sum'].values == [3, 2, 1, 1, 1, 2, 3])\n assert np.all(x_sum_daily['sum'].values == [3, 2, 1, 1, 1, 2, 2])\n assert np.all(x_avg_daily_all['avg'].values == [1, 1, 1, 1, 1, 1, 1])\n assert np.all(x_avg_daily['avg'] == x_avg_daily_all['avg'])\n\n\ndef test_parse_fields(pawprint_default_tracker_db):\n \"\"\"Test args passed to read() and _aggregate() are parsed correctly.\"\"\"\n tracker = pawprint_default_tracker_db\n args = ()\n assert tracker._parse_fields(*args) == '*'\n args = 'event',\n assert tracker._parse_fields(*args) == 'event'\n args = 'user_id', 'timestamp'\n assert tracker._parse_fields(*args) == 'user_id, timestamp'\n args = 'metadata__a__b',\n assert tracker._parse_fields(*args) == \"metadata #> '{a, b}' AS json_field\"\n\n\ndef test_parse_values(pawprint_default_tracker_db):\n \"\"\"Test parsing values for write().\"\"\"\n tracker = pawprint_default_tracker_db\n args = 'logged_in',\n assert tracker._parse_values(*args) == \"'logged_in'\"\n args = 'logged_in', 'hannah'\n assert tracker._parse_values(*args) == \"'logged_in', 'hannah'\"\n\n\ndef test_parse_conditionals(pawprint_default_tracker_db):\n \"\"\"Test kwargs passed to read() and _aggregate() are parsed correctly.\"\"\"\n tracker = pawprint_default_tracker_db\n kwargs = {}\n assert tracker._parse_conditionals(**kwargs) == ''\n kwargs = {'user_id': 'Quentin'}\n assert tracker._parse_conditionals(**kwargs) == \"WHERE user_id = 'Quentin'\"\n kwargs = {'event': 'logged_in', 'user_id': 'Quentin'}\n assert tracker._parse_conditionals(**kwargs) in (\n \"WHERE event = 'logged_in' AND user_id = 'Quentin'\",\n \"WHERE user_id = 'Quentin' AND event = 'logged_in'\")\n kwargs = {'event__in': ['logged_in', 'logged_out']}\n assert tracker._parse_conditionals(**kwargs\n ) == \"WHERE event IN ('logged_in', 'logged_out')\"\n\n\ndef test_accessing_json_fields(pawprint_default_tracker_db_with_table):\n \"\"\"Test some structured data pulling.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n simple = {'integral': 'derivative'}\n medium = {'montecarlo': {'prior': 'likelihood'}}\n difficult = {'deepnet': ['mlp', 'cnn', 'rnn'], 'ensembles': {'random':\n 'forest', 'always': {'cross_validate': ['kfold', 'stratified']}}}\n tracker.write(event='maths', metadata=simple)\n tracker.write(event='stats', metadata=medium)\n tracker.write(event='ml', metadata=difficult)\n maths_all = tracker.read('metadata__integral')\n maths_condition = tracker.read('metadata__integral', event='maths')\n assert len(maths_all) == 3\n assert len(maths_condition) == 1\n assert list(maths_all.json_field) == ['derivative', None, None]\n stats = tracker.read('metadata__montecarlo__prior').dropna()\n assert len(stats) == 1\n assert stats.json_field.iloc[0] == 'likelihood'\n types_of_nn = tracker.read('metadata__deepnet').dropna()\n best_nn = tracker.read('metadata__deepnet__1').dropna()\n full_depth = tracker.read('metadata__ensembles__always__cross_validate__0'\n ).dropna()\n assert len(types_of_nn) == 1\n assert len(best_nn) == 1\n assert best_nn.json_field.iloc[0] == 'cnn'\n assert len(full_depth) == 1\n assert full_depth.json_field.iloc[0] == 'kfold'\n\n\ndef test_json_maths(pawprint_default_tracker_db_with_table):\n \"\"\"More advanced operations on JSON subfields.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='whisky', metadata={'uigeadail': 
{'value': 123,\n 'lagavulin': [4, 2]}})\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 456,\n 'lagavulin': [5, 0]}})\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 758,\n 'lagavulin': [7, 10]}})\n tracker.write(event='armagnac', metadata={'age': 'XO'})\n tracker.write(event='armagnac', metadata={'age': 15})\n assert len(tracker.read()) == 5\n assert len(tracker.read(metadata__uigeadail__contains='lagavulin')) == 3\n assert len(tracker.read(metadata__uigeadail__value__gt=123)) == 2\n assert len(tracker.read(metadata__uigeadail__value__gte=123)) == 3\n whiskies = tracker.sum('metadata__uigeadail__value')\n assert len(whiskies) == 1\n assert whiskies.iloc[0]['sum'] == 1337\n assert len(tracker.read(metadata__contains='age')) == 2\n assert len(tracker.read(metadata__age='XO')) == 1\n\n\ndef test_silent_write_errors():\n \"\"\"When a failure occurs in event write, it should fail silently.\"\"\"\n tracker = pawprint.Tracker(db=None, table=None)\n try:\n tracker.write(event='This will fail silently.')\n except Exception:\n pytest.fail('Failed to fail silently.')\n\n\ndef test_nonsilent_write_errors(error_logger):\n \"\"\"Test non-silent write errors that should output to the logger or raise exceptions.\"\"\"\n tracker = pawprint.Tracker(db='postgresql:///fail', logger=error_logger)\n with pytest.raises(Exception):\n tracker.write()\n with pytest.raises(Exception):\n tracker.write(event='going_to_fail')\n with open('pawprint.log', mode='r') as f:\n logs = f.readlines()\n print(logs[3])\n assert len(logs) == 6\n assert logs[0].startswith('pawprint: pawprint failed to write.')\n assert 'Table: None. Query: INSERT INTO None () VALUES ();' in logs[0]\n assert \"Query: INSERT INTO None (event) VALUES ('going_to_fail')\" in logs[3\n ]\n os.remove('pawprint.log')\n\n\ndef test_auto_timestamp(db_string):\n \"\"\"Ensure that timestamps are autopopulated correctly if not passed.\"\"\"\n schema = {'event': 'TEXT', 'timestamp': 'TIMESTAMP'}\n no_auto = pawprint.Tracker(db=db_string, table='no_auto',\n auto_timestamp=False, schema=schema)\n auto = pawprint.Tracker(db=db_string, table='auto', auto_timestamp=True,\n schema=schema)\n no_auto.create_table()\n auto.create_table()\n no_auto.write(event='foo')\n auto.write(event='bar')\n assert len(no_auto.read()) == 1\n assert len(auto.read()) == 1\n assert len(no_auto.read().dropna()) == 0\n assert len(auto.read().dropna()) == 1\n no_auto.drop_table()\n auto.drop_table()\n\n\ndef test_repr_and_str(pawprint_default_tracker_db):\n \"\"\"Test the __repr__ and __str__.\"\"\"\n tracker = pawprint_default_tracker_db\n expected_repr = \"pawprint.Tracker on table '{}' and database '{}'\".format(\n tracker.table, tracker.db)\n expected_str = 'pawprint Tracker object.\\ndb : {}\\ntable : {}'.format(\n tracker.db, tracker.table)\n assert tracker.__repr__() == expected_repr\n assert tracker.__str__() == expected_str\n\n\ndef test_malicious_strings(pawprint_default_tracker_db_with_table):\n \"\"\"Test that SQL injection strings are sanitized\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='armageddon', metadata={'shady business': {'with':\n 'the following string', 'of sql':\n \"50');INSERT INTO {table} (event, user_id) VALUES ('you got pwnd', '50\"\n .format(table=tracker.table)}})\n assert len(tracker.read()) == 1\n tracker.write(event='armageddon', metadata={'more shady business': {\n 'my shady sql': \"' OR '1'='1;DROP TABLE {table};\".format(table=\n tracker.table)}})\n assert len(tracker.read()) == 2\n 
tracker.write(event=\"' OR '1'='1;\", metadata={'foo':\n \"x'); DROP TABLE {table}; --\".format(table=tracker.table)})\n assert len(tracker.read()) == 3\n\n\ndef test_escaping_from_quotes(pawprint_default_tracker_db_with_table):\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='known crummy string', metadata={'foo': {'toState':\n \"#/app/dealnotes/2345/FORPETE'S_SAKE,_LLC_Tenant_Rep_Lease_2\",\n 'fromState': '#/app/dealdetails/2345', 'platform': 'iOS App'}})\n assert len(tracker.read()) == 1\n",
"step-4": "import os\nimport json\nimport pytest\nfrom datetime import datetime\nfrom collections import OrderedDict\nimport numpy as np\nimport pandas as pd\nfrom sqlalchemy.exc import ProgrammingError\nimport pawprint\n\n\ndef test_create_table_with_default_options(pawprint_default_tracker_db):\n \"\"\"Ensure the table is correctly created with the default schema.\"\"\"\n tracker = pawprint_default_tracker_db\n assert tracker.create_table() is None\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n assert pd.io.sql.execute('SELECT COUNT(*) FROM {}'.format(tracker.table\n ), tracker.db).fetchall() == [(0,)]\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n expected_schema = [(u'id', u'integer', None), (u'timestamp',\n u'timestamp without time zone', None), (u'user_id', u'text', None),\n (u'event', u'text', None), (u'metadata', u'jsonb', None)]\n assert schema == expected_schema\n\n\ndef test_drop_table(pawprint_default_tracker_db_with_table):\n \"\"\"Ensure that tables are deleted successfully.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n tracker.drop_table()\n with pytest.raises(ProgrammingError):\n tracker.drop_table()\n\n\ndef test_instantiate_tracker_from_dot_file(drop_tracker_test_table):\n \"\"\"Test instantiating a Tracker with a dotfile instead of using db and table strings.\"\"\"\n dotfile = {'db': 'postgresql:///little_bean_toes', 'json_field':\n 'such_fuzzy'}\n with open('.pawprint', 'w') as f:\n json.dump(dotfile, f)\n tracker = pawprint.Tracker(dotfile='.pawprint', json_field='boop')\n assert tracker.db == 'postgresql:///little_bean_toes'\n assert tracker.table is None\n assert tracker.json_field == 'boop'\n os.remove('.pawprint')\n\n\ndef test_create_table_with_other_options(drop_tracker_test_table, db_string,\n tracker_test_table_name):\n \"\"\"Ensure the table is correctly created with an alternative schema.\"\"\"\n schema = OrderedDict([('pk', 'SERIAL PRIMARY KEY'), ('infofield', 'TEXT')])\n tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name,\n schema=schema)\n tracker.create_table()\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length FROM INFORMATION_SCHEMA.COLUMNS WHERE table_name = '{}'\"\n .format(tracker.table), tracker.db).fetchall()\n assert schema == [('pk', 'integer', None), ('infofield', 'text', None)]\n\n\ndef test_write(drop_tracker_test_table, db_string, tracker_test_table_name):\n \"\"\"Test the tracking of an event.\"\"\"\n tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name,\n schema={'id': 'INT'})\n tracker.create_table()\n assert pd.io.sql.execute('SELECT COUNT(*) FROM {}'.format(tracker.table\n ), tracker.db).fetchall() == [(0,)]\n tracker.write(id=1337)\n assert pd.io.sql.execute('SELECT COUNT(*) FROM {}'.format(tracker.table\n ), tracker.db).fetchall() == [(1,)]\n data = pd.read_sql('SELECT * FROM {}'.format(tracker.table), tracker.db)\n assert isinstance(data, pd.DataFrame)\n assert len(data.columns) == 1\n assert data.columns[0] == 'id'\n assert data.id[0] == 1337\n\n\ndef test_read(pawprint_default_tracker_db_with_table):\n \"\"\"Test pulling the data into a dataframe according to various simple filters.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n assert len(tracker.read()) == 0\n tracker.write(user_id='Pawprint', 
event='Testing !')\n tracker.write(user_id='Pawprint')\n tracker.write(event='No user')\n tracker.write(user_id='import this', event='very zen', metadata={\n 'better': 'forgiveness', 'worse': 'permission', 'ordered': [\n 'simple', 'complex', 'complicated']})\n all_data = tracker.read()\n pawprint_events = tracker.read(user_id='Pawprint')\n id_gt_events = tracker.read(id__gt=10)\n id_gte_lt_events = tracker.read(id__gte=1, id__lt=3)\n field_events = tracker.read('event', id__lte=100, event='very zen')\n contains_events = tracker.read(metadata__contains='better')\n not_contains_events = tracker.read(metadata__contains='whisky')\n assert len(all_data) == 4\n assert len(pawprint_events) == 2\n assert len(id_gt_events) == 0\n assert len(id_gte_lt_events) == 2\n assert len(field_events) == 1\n assert len(contains_events) == 1\n assert len(not_contains_events) == 0\n assert set(all_data.columns) == set(['id', 'user_id', 'event',\n 'metadata', 'timestamp'])\n assert set(field_events.columns) == set(['event'])\n\n\ndef test_counts(pawprint_default_tracker_db_with_table):\n \"\"\"Test counting a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n query = (\n \"\"\"\n INSERT INTO {} (timestamp, user_id, event) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in'),\n ('2016-01-01 12:40', 'bob', 'logged_in'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in'),\n ('2016-01-02 00:00', 'dan', 'logged_in'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in'),\n ('2016-01-05 00:00', 'frank', 'logged_in'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in'),\n ('2016-01-20 00:00', 'hans', 'logged_in'),\n ('2016-02-01 00:00', 'iris', 'logged_in'),\n ('2016-02-01 00:00', 'james', 'logged_in'),\n ('2016-03-01 00:00', 'kelly', 'logged_in'),\n ('2016-03-01 00:00', 'laura', 'logged_in'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in')\n \"\"\"\n .format(tracker.table))\n pd.io.sql.execute(query, tracker.db)\n logins_hourly = tracker.count(event='logged_in', resolution='hour')\n logins_daily = tracker.count(event='logged_in')\n logins_weekly = tracker.count(event='logged_in', resolution='week')\n logins_monthly = tracker.count(event='logged_in', resolution='month')\n logins_weekly_left_range = tracker.count(event='logged_in', resolution=\n 'week', start=datetime(2016, 2, 1))\n logins_weekly_right_range = tracker.count(event='logged_in', resolution\n ='week', end=datetime(2016, 2, 1))\n logins_daily_full_range = tracker.count(event='logged_in', start=\n datetime(2016, 1, 15), end=datetime(2016, 2, 15))\n assert len(logins_hourly) == 8\n assert np.all(logins_hourly['count'].values == [2, 1, 2, 1, 1, 1, 2, 2])\n assert len(logins_daily) == 7\n assert np.all(logins_daily['count'].values == [3, 2, 1, 1, 1, 2, 2])\n assert len(logins_weekly) == 5\n assert np.all(logins_weekly['count'].values == [5, 2, 1, 2, 2])\n assert len(logins_monthly) == 3\n assert len(logins_weekly_left_range) == 2\n assert len(logins_weekly_right_range) == 4\n assert len(logins_daily_full_range) == 2\n\n\ndef test_sum_and_average(pawprint_default_tracker_db_with_table):\n \"\"\"Test aggregating a specific event, with date ranges and time resolutions.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n metadata = str('{\"val\": 1}').replace(\"'\", '\"')\n query = (\n \"\"\"\n INSERT INTO {table} (timestamp, user_id, event, metadata) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in', '{metadata}'),\n ('2016-01-01 12:40', 'bob', 'logged_in', '{metadata}'),\n ('2016-01-01 16:00', 'charlotte', 
'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'dan', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in', '{metadata}'),\n ('2016-01-05 00:00', 'frank', 'logged_in', '{metadata}'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in', '{metadata}'),\n ('2016-01-20 00:00', 'hans', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'iris', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'james', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'kelly', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'laura', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in', '{metadata}')\n \"\"\"\n .format(table=tracker.table, metadata=metadata))\n pd.io.sql.execute(query, tracker.db)\n x_sum_daily_all = tracker.sum('metadata__val')\n x_sum_daily = tracker.sum('metadata__val', event='logged_in')\n x_avg_daily_all = tracker.average('metadata__val', event='logged_in')\n x_avg_daily = tracker.average('metadata__val', event='logged_in')\n assert len(x_sum_daily) == 7\n assert np.all(x_sum_daily_all['sum'].values == [3, 2, 1, 1, 1, 2, 3])\n assert np.all(x_sum_daily['sum'].values == [3, 2, 1, 1, 1, 2, 2])\n assert np.all(x_avg_daily_all['avg'].values == [1, 1, 1, 1, 1, 1, 1])\n assert np.all(x_avg_daily['avg'] == x_avg_daily_all['avg'])\n\n\ndef test_parse_fields(pawprint_default_tracker_db):\n \"\"\"Test args passed to read() and _aggregate() are parsed correctly.\"\"\"\n tracker = pawprint_default_tracker_db\n args = ()\n assert tracker._parse_fields(*args) == '*'\n args = 'event',\n assert tracker._parse_fields(*args) == 'event'\n args = 'user_id', 'timestamp'\n assert tracker._parse_fields(*args) == 'user_id, timestamp'\n args = 'metadata__a__b',\n assert tracker._parse_fields(*args) == \"metadata #> '{a, b}' AS json_field\"\n\n\ndef test_parse_values(pawprint_default_tracker_db):\n \"\"\"Test parsing values for write().\"\"\"\n tracker = pawprint_default_tracker_db\n args = 'logged_in',\n assert tracker._parse_values(*args) == \"'logged_in'\"\n args = 'logged_in', 'hannah'\n assert tracker._parse_values(*args) == \"'logged_in', 'hannah'\"\n\n\ndef test_parse_conditionals(pawprint_default_tracker_db):\n \"\"\"Test kwargs passed to read() and _aggregate() are parsed correctly.\"\"\"\n tracker = pawprint_default_tracker_db\n kwargs = {}\n assert tracker._parse_conditionals(**kwargs) == ''\n kwargs = {'user_id': 'Quentin'}\n assert tracker._parse_conditionals(**kwargs) == \"WHERE user_id = 'Quentin'\"\n kwargs = {'event': 'logged_in', 'user_id': 'Quentin'}\n assert tracker._parse_conditionals(**kwargs) in (\n \"WHERE event = 'logged_in' AND user_id = 'Quentin'\",\n \"WHERE user_id = 'Quentin' AND event = 'logged_in'\")\n kwargs = {'event__in': ['logged_in', 'logged_out']}\n assert tracker._parse_conditionals(**kwargs\n ) == \"WHERE event IN ('logged_in', 'logged_out')\"\n\n\ndef test_accessing_json_fields(pawprint_default_tracker_db_with_table):\n \"\"\"Test some structured data pulling.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n simple = {'integral': 'derivative'}\n medium = {'montecarlo': {'prior': 'likelihood'}}\n difficult = {'deepnet': ['mlp', 'cnn', 'rnn'], 'ensembles': {'random':\n 'forest', 'always': {'cross_validate': ['kfold', 'stratified']}}}\n tracker.write(event='maths', metadata=simple)\n tracker.write(event='stats', metadata=medium)\n tracker.write(event='ml', metadata=difficult)\n maths_all = tracker.read('metadata__integral')\n maths_condition = tracker.read('metadata__integral', event='maths')\n assert len(maths_all) == 
3\n assert len(maths_condition) == 1\n assert list(maths_all.json_field) == ['derivative', None, None]\n stats = tracker.read('metadata__montecarlo__prior').dropna()\n assert len(stats) == 1\n assert stats.json_field.iloc[0] == 'likelihood'\n types_of_nn = tracker.read('metadata__deepnet').dropna()\n best_nn = tracker.read('metadata__deepnet__1').dropna()\n full_depth = tracker.read('metadata__ensembles__always__cross_validate__0'\n ).dropna()\n assert len(types_of_nn) == 1\n assert len(best_nn) == 1\n assert best_nn.json_field.iloc[0] == 'cnn'\n assert len(full_depth) == 1\n assert full_depth.json_field.iloc[0] == 'kfold'\n\n\ndef test_json_maths(pawprint_default_tracker_db_with_table):\n \"\"\"More advanced operations on JSON subfields.\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 123,\n 'lagavulin': [4, 2]}})\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 456,\n 'lagavulin': [5, 0]}})\n tracker.write(event='whisky', metadata={'uigeadail': {'value': 758,\n 'lagavulin': [7, 10]}})\n tracker.write(event='armagnac', metadata={'age': 'XO'})\n tracker.write(event='armagnac', metadata={'age': 15})\n assert len(tracker.read()) == 5\n assert len(tracker.read(metadata__uigeadail__contains='lagavulin')) == 3\n assert len(tracker.read(metadata__uigeadail__value__gt=123)) == 2\n assert len(tracker.read(metadata__uigeadail__value__gte=123)) == 3\n whiskies = tracker.sum('metadata__uigeadail__value')\n assert len(whiskies) == 1\n assert whiskies.iloc[0]['sum'] == 1337\n assert len(tracker.read(metadata__contains='age')) == 2\n assert len(tracker.read(metadata__age='XO')) == 1\n\n\ndef test_silent_write_errors():\n \"\"\"When a failure occurs in event write, it should fail silently.\"\"\"\n tracker = pawprint.Tracker(db=None, table=None)\n try:\n tracker.write(event='This will fail silently.')\n except Exception:\n pytest.fail('Failed to fail silently.')\n\n\ndef test_nonsilent_write_errors(error_logger):\n \"\"\"Test non-silent write errors that should output to the logger or raise exceptions.\"\"\"\n tracker = pawprint.Tracker(db='postgresql:///fail', logger=error_logger)\n with pytest.raises(Exception):\n tracker.write()\n with pytest.raises(Exception):\n tracker.write(event='going_to_fail')\n with open('pawprint.log', mode='r') as f:\n logs = f.readlines()\n print(logs[3])\n assert len(logs) == 6\n assert logs[0].startswith('pawprint: pawprint failed to write.')\n assert 'Table: None. 
Query: INSERT INTO None () VALUES ();' in logs[0]\n assert \"Query: INSERT INTO None (event) VALUES ('going_to_fail')\" in logs[3\n ]\n os.remove('pawprint.log')\n\n\ndef test_auto_timestamp(db_string):\n \"\"\"Ensure that timestamps are autopopulated correctly if not passed.\"\"\"\n schema = {'event': 'TEXT', 'timestamp': 'TIMESTAMP'}\n no_auto = pawprint.Tracker(db=db_string, table='no_auto',\n auto_timestamp=False, schema=schema)\n auto = pawprint.Tracker(db=db_string, table='auto', auto_timestamp=True,\n schema=schema)\n no_auto.create_table()\n auto.create_table()\n no_auto.write(event='foo')\n auto.write(event='bar')\n assert len(no_auto.read()) == 1\n assert len(auto.read()) == 1\n assert len(no_auto.read().dropna()) == 0\n assert len(auto.read().dropna()) == 1\n no_auto.drop_table()\n auto.drop_table()\n\n\ndef test_repr_and_str(pawprint_default_tracker_db):\n \"\"\"Test the __repr__ and __str__.\"\"\"\n tracker = pawprint_default_tracker_db\n expected_repr = \"pawprint.Tracker on table '{}' and database '{}'\".format(\n tracker.table, tracker.db)\n expected_str = 'pawprint Tracker object.\\ndb : {}\\ntable : {}'.format(\n tracker.db, tracker.table)\n assert tracker.__repr__() == expected_repr\n assert tracker.__str__() == expected_str\n\n\ndef test_malicious_strings(pawprint_default_tracker_db_with_table):\n \"\"\"Test that SQL injection strings are sanitized\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='armageddon', metadata={'shady business': {'with':\n 'the following string', 'of sql':\n \"50');INSERT INTO {table} (event, user_id) VALUES ('you got pwnd', '50\"\n .format(table=tracker.table)}})\n assert len(tracker.read()) == 1\n tracker.write(event='armageddon', metadata={'more shady business': {\n 'my shady sql': \"' OR '1'='1;DROP TABLE {table};\".format(table=\n tracker.table)}})\n assert len(tracker.read()) == 2\n tracker.write(event=\"' OR '1'='1;\", metadata={'foo':\n \"x'); DROP TABLE {table}; --\".format(table=tracker.table)})\n assert len(tracker.read()) == 3\n\n\ndef test_escaping_from_quotes(pawprint_default_tracker_db_with_table):\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(event='known crummy string', metadata={'foo': {'toState':\n \"#/app/dealnotes/2345/FORPETE'S_SAKE,_LLC_Tenant_Rep_Lease_2\",\n 'fromState': '#/app/dealdetails/2345', 'platform': 'iOS App'}})\n assert len(tracker.read()) == 1\n",
"step-5": "import os\nimport json\nimport pytest\n\nfrom datetime import datetime\nfrom collections import OrderedDict\n\nimport numpy as np\nimport pandas as pd\nfrom sqlalchemy.exc import ProgrammingError\n\nimport pawprint\n\n\ndef test_create_table_with_default_options(pawprint_default_tracker_db):\n \"\"\"Ensure the table is correctly created with the default schema.\"\"\"\n\n tracker = pawprint_default_tracker_db\n\n # The table shouldn't exist. Assert it's correct created.\n assert tracker.create_table() is None\n\n # Try creating it again. This should raise an error.\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n\n # Assert the table is empty when created\n assert pd.io.sql.execute(\n \"SELECT COUNT(*) FROM {}\".format(tracker.table), tracker.db\n ).fetchall() == [(0,)]\n\n # Ensure its schema is correct\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length \"\n \"FROM INFORMATION_SCHEMA.COLUMNS \"\n \"WHERE table_name = '{}'\".format(tracker.table),\n tracker.db,\n ).fetchall()\n expected_schema = [\n (u\"id\", u\"integer\", None),\n (u\"timestamp\", u\"timestamp without time zone\", None),\n (u\"user_id\", u\"text\", None),\n (u\"event\", u\"text\", None),\n (u\"metadata\", u\"jsonb\", None),\n ]\n assert schema == expected_schema\n\n\ndef test_drop_table(pawprint_default_tracker_db_with_table):\n \"\"\"Ensure that tables are deleted successfully.\"\"\"\n\n tracker = pawprint_default_tracker_db_with_table\n\n # make sure table exists\n with pytest.raises(ProgrammingError):\n tracker.create_table()\n\n tracker.drop_table()\n\n with pytest.raises(ProgrammingError):\n tracker.drop_table()\n\n\ndef test_instantiate_tracker_from_dot_file(drop_tracker_test_table):\n \"\"\"Test instantiating a Tracker with a dotfile instead of using db and table strings.\"\"\"\n\n # Write a dotfile to disk\n dotfile = {\n \"db\": \"postgresql:///little_bean_toes\",\n \"json_field\": \"such_fuzzy\",\n }\n\n with open(\".pawprint\", \"w\") as f:\n json.dump(dotfile, f)\n\n # Create a tracker from this dotfile\n tracker = pawprint.Tracker(dotfile=\".pawprint\", json_field=\"boop\")\n\n # Ensure all the entries are as they should be\n assert tracker.db == \"postgresql:///little_bean_toes\"\n assert tracker.table is None\n # assert tracker.logger is None\n assert tracker.json_field == \"boop\" # field present in dotfile but overwritten in init\n\n os.remove(\".pawprint\")\n\n\ndef test_create_table_with_other_options(\n drop_tracker_test_table, db_string, tracker_test_table_name\n):\n \"\"\"Ensure the table is correctly created with an alternative schema.\"\"\"\n\n schema = OrderedDict([(\"pk\", \"SERIAL PRIMARY KEY\"), (\"infofield\", \"TEXT\")])\n tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name, schema=schema)\n tracker.create_table()\n\n # Ensure its schema is correct\n schema = pd.io.sql.execute(\n \"SELECT column_name, data_type, character_maximum_length \"\n \"FROM INFORMATION_SCHEMA.COLUMNS \"\n \"WHERE table_name = '{}'\".format(tracker.table),\n tracker.db,\n ).fetchall()\n\n assert schema == [(\"pk\", \"integer\", None), (\"infofield\", \"text\", None)]\n\n\ndef test_write(drop_tracker_test_table, db_string, tracker_test_table_name):\n \"\"\"Test the tracking of an event.\"\"\"\n\n tracker = pawprint.Tracker(db=db_string, table=tracker_test_table_name, schema={\"id\": \"INT\"})\n tracker.create_table()\n\n # Check the table's empty\n assert pd.io.sql.execute(\n \"SELECT COUNT(*) FROM {}\".format(tracker.table), 
tracker.db\n ).fetchall() == [(0,)]\n\n # Add some data and check if the row count increases by one\n tracker.write(id=1337)\n assert pd.io.sql.execute(\n \"SELECT COUNT(*) FROM {}\".format(tracker.table), tracker.db\n ).fetchall() == [(1,)]\n\n # Pull the data and ensure it's correct\n data = pd.read_sql(\"SELECT * FROM {}\".format(tracker.table), tracker.db)\n assert isinstance(data, pd.DataFrame)\n assert len(data.columns) == 1\n assert data.columns[0] == \"id\"\n assert data.id[0] == 1337\n\n\ndef test_read(pawprint_default_tracker_db_with_table):\n \"\"\"Test pulling the data into a dataframe according to various simple filters.\"\"\"\n\n tracker = pawprint_default_tracker_db_with_table\n\n # Ensure the table is empty to begin with\n assert len(tracker.read()) == 0\n\n # Add some data\n tracker.write(user_id=\"Pawprint\", event=\"Testing !\")\n tracker.write(user_id=\"Pawprint\")\n tracker.write(event=\"No user\")\n tracker.write(\n user_id=\"import this\",\n event=\"very zen\",\n metadata={\n \"better\": \"forgiveness\",\n \"worse\": \"permission\",\n \"ordered\": [\"simple\", \"complex\", \"complicated\"],\n },\n )\n\n all_data = tracker.read()\n pawprint_events = tracker.read(user_id=\"Pawprint\")\n id_gt_events = tracker.read(id__gt=10)\n id_gte_lt_events = tracker.read(id__gte=1, id__lt=3)\n field_events = tracker.read(\"event\", id__lte=100, event=\"very zen\")\n contains_events = tracker.read(metadata__contains=\"better\")\n not_contains_events = tracker.read(metadata__contains=\"whisky\")\n\n assert len(all_data) == 4\n assert len(pawprint_events) == 2\n assert len(id_gt_events) == 0\n assert len(id_gte_lt_events) == 2\n assert len(field_events) == 1\n assert len(contains_events) == 1\n assert len(not_contains_events) == 0\n\n assert set(all_data.columns) == set([\"id\", \"user_id\", \"event\", \"metadata\", \"timestamp\"])\n assert set(field_events.columns) == set([\"event\"])\n\n\ndef test_counts(pawprint_default_tracker_db_with_table):\n \"\"\"Test counting a specific event, with date ranges and time resolutions.\"\"\"\n\n tracker = pawprint_default_tracker_db_with_table\n\n # Add a bunch of events\n query = (\n \"\"\"\n INSERT INTO {} (timestamp, user_id, event) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in'),\n ('2016-01-01 12:40', 'bob', 'logged_in'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in'),\n ('2016-01-02 00:00', 'dan', 'logged_in'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in'),\n ('2016-01-05 00:00', 'frank', 'logged_in'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in'),\n ('2016-01-20 00:00', 'hans', 'logged_in'),\n ('2016-02-01 00:00', 'iris', 'logged_in'),\n ('2016-02-01 00:00', 'james', 'logged_in'),\n ('2016-03-01 00:00', 'kelly', 'logged_in'),\n ('2016-03-01 00:00', 'laura', 'logged_in'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in')\n \"\"\"\n ).format(tracker.table)\n\n pd.io.sql.execute(query, tracker.db)\n\n logins_hourly = tracker.count(event=\"logged_in\", resolution=\"hour\")\n logins_daily = tracker.count(event=\"logged_in\")\n logins_weekly = tracker.count(event=\"logged_in\", resolution=\"week\")\n logins_monthly = tracker.count(event=\"logged_in\", resolution=\"month\")\n logins_weekly_left_range = tracker.count(\n event=\"logged_in\", resolution=\"week\", start=datetime(2016, 2, 1)\n )\n logins_weekly_right_range = tracker.count(\n event=\"logged_in\", resolution=\"week\", end=datetime(2016, 2, 1)\n )\n logins_daily_full_range = tracker.count(\n event=\"logged_in\", start=datetime(2016, 1, 15), end=datetime(2016, 2, 15)\n )\n\n # 
Hourly\n assert len(logins_hourly) == 8\n assert np.all(logins_hourly[\"count\"].values == [2, 1, 2, 1, 1, 1, 2, 2])\n\n # Daily\n assert len(logins_daily) == 7\n assert np.all(logins_daily[\"count\"].values == [3, 2, 1, 1, 1, 2, 2])\n\n # Weekly\n assert len(logins_weekly) == 5\n assert np.all(logins_weekly[\"count\"].values == [5, 2, 1, 2, 2])\n\n # Others\n assert len(logins_monthly) == 3\n assert len(logins_weekly_left_range) == 2 # weeks start on Monday\n assert len(logins_weekly_right_range) == 4 # and not at the start / end dates provided\n assert len(logins_daily_full_range) == 2\n\n\ndef test_sum_and_average(pawprint_default_tracker_db_with_table):\n \"\"\"Test aggregating a specific event, with date ranges and time resolutions.\"\"\"\n\n tracker = pawprint_default_tracker_db_with_table\n\n metadata = str('{\"val\": 1}').replace(\"'\", '\"')\n\n # Add a bunch of events\n query = (\n \"\"\"\n INSERT INTO {table} (timestamp, user_id, event, metadata) VALUES\n ('2016-01-01 12:30', 'alice', 'logged_in', '{metadata}'),\n ('2016-01-01 12:40', 'bob', 'logged_in', '{metadata}'),\n ('2016-01-01 16:00', 'charlotte', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'dan', 'logged_in', '{metadata}'),\n ('2016-01-02 00:00', 'elizabeth', 'logged_in', '{metadata}'),\n ('2016-01-05 00:00', 'frank', 'logged_in', '{metadata}'),\n ('2016-01-10 00:00', 'gabrielle', 'logged_in', '{metadata}'),\n ('2016-01-20 00:00', 'hans', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'iris', 'logged_in', '{metadata}'),\n ('2016-02-01 00:00', 'james', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'kelly', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'laura', 'logged_in', '{metadata}'),\n ('2016-03-01 00:00', 'mike', 'not_logged_in', '{metadata}')\n \"\"\"\n ).format(table=tracker.table, metadata=metadata)\n\n pd.io.sql.execute(query, tracker.db)\n\n x_sum_daily_all = tracker.sum(\"metadata__val\")\n x_sum_daily = tracker.sum(\"metadata__val\", event=\"logged_in\")\n\n x_avg_daily_all = tracker.average(\"metadata__val\", event=\"logged_in\")\n x_avg_daily = tracker.average(\"metadata__val\", event=\"logged_in\")\n\n assert len(x_sum_daily) == 7\n\n assert np.all(x_sum_daily_all[\"sum\"].values == [3, 2, 1, 1, 1, 2, 3])\n assert np.all(x_sum_daily[\"sum\"].values == [3, 2, 1, 1, 1, 2, 2])\n\n assert np.all(x_avg_daily_all[\"avg\"].values == [1, 1, 1, 1, 1, 1, 1])\n assert np.all(x_avg_daily[\"avg\"] == x_avg_daily_all[\"avg\"])\n\n\ndef test_parse_fields(pawprint_default_tracker_db):\n \"\"\"Test args passed to read() and _aggregate() are parsed correctly.\"\"\"\n\n tracker = pawprint_default_tracker_db\n\n # SELECT * FROM table\n args = ()\n assert tracker._parse_fields(*args) == \"*\"\n\n # SELECT event FROM table\n args = (\"event\",)\n assert tracker._parse_fields(*args) == \"event\"\n\n # SELECT user_id, timestamp FROM table\n args = (\"user_id\", \"timestamp\")\n assert tracker._parse_fields(*args) == \"user_id, timestamp\"\n\n # SELECT metadata #>> '{a, b}' FROM table\n args = (\"metadata__a__b\",)\n assert tracker._parse_fields(*args) == \"metadata #> '{a, b}' AS json_field\"\n\n\ndef test_parse_values(pawprint_default_tracker_db):\n \"\"\"Test parsing values for write().\"\"\"\n\n tracker = pawprint_default_tracker_db\n\n # INSERT INTO table (event) VALUES ('logged_in')\n args = (\"logged_in\",)\n assert tracker._parse_values(*args) == \"'logged_in'\"\n\n # INSERT INTO table (event, user_id) VALUES ('logged_in', 'hannah')\n args = (\"logged_in\", \"hannah\")\n assert 
tracker._parse_values(*args) == \"'logged_in', 'hannah'\"\n\n\ndef test_parse_conditionals(pawprint_default_tracker_db):\n \"\"\"Test kwargs passed to read() and _aggregate() are parsed correctly.\"\"\"\n\n tracker = pawprint_default_tracker_db\n\n # SELECT * FROM table\n kwargs = {}\n assert tracker._parse_conditionals(**kwargs) == \"\"\n\n # SELECT * FROM table WHERE user_id = 'Quentin'\n kwargs = {\"user_id\": \"Quentin\"}\n assert tracker._parse_conditionals(**kwargs) == \"WHERE user_id = 'Quentin'\"\n\n # SELECT * FROM table WHERE event = 'logged_in' AND user_id = 'Quentin'\n kwargs = {\"event\": \"logged_in\", \"user_id\": \"Quentin\"}\n assert tracker._parse_conditionals(**kwargs) in (\n \"WHERE event = 'logged_in' AND user_id = 'Quentin'\",\n \"WHERE user_id = 'Quentin' AND event = 'logged_in'\",\n )\n\n # SELECT * FROM table WHERE event IN ('logged_in', 'logged_out')\n kwargs = {\"event__in\": [\"logged_in\", \"logged_out\"]}\n assert tracker._parse_conditionals(**kwargs) == \"WHERE event IN ('logged_in', 'logged_out')\"\n\n\ndef test_accessing_json_fields(pawprint_default_tracker_db_with_table):\n \"\"\"Test some structured data pulling.\"\"\"\n\n tracker = pawprint_default_tracker_db_with_table\n\n # JSON objects in our tracking database\n simple = {\"integral\": \"derivative\"}\n medium = {\"montecarlo\": {\"prior\": \"likelihood\"}}\n difficult = {\n \"deepnet\": [\"mlp\", \"cnn\", \"rnn\"],\n \"ensembles\": {\"random\": \"forest\", \"always\": {\"cross_validate\": [\"kfold\", \"stratified\"]}},\n }\n\n tracker.write(event=\"maths\", metadata=simple)\n tracker.write(event=\"stats\", metadata=medium)\n tracker.write(event=\"ml\", metadata=difficult)\n\n maths_all = tracker.read(\"metadata__integral\")\n maths_condition = tracker.read(\"metadata__integral\", event=\"maths\")\n assert len(maths_all) == 3\n assert len(maths_condition) == 1\n assert list(maths_all.json_field) == [\"derivative\", None, None]\n\n stats = tracker.read(\"metadata__montecarlo__prior\").dropna()\n assert len(stats) == 1\n assert stats.json_field.iloc[0] == \"likelihood\"\n\n types_of_nn = tracker.read(\"metadata__deepnet\").dropna()\n best_nn = tracker.read(\"metadata__deepnet__1\").dropna()\n full_depth = tracker.read(\"metadata__ensembles__always__cross_validate__0\").dropna()\n assert len(types_of_nn) == 1\n assert len(best_nn) == 1\n assert best_nn.json_field.iloc[0] == \"cnn\"\n assert len(full_depth) == 1\n assert full_depth.json_field.iloc[0] == \"kfold\"\n\n\ndef test_json_maths(pawprint_default_tracker_db_with_table):\n \"\"\"More advanced operations on JSON subfields.\"\"\"\n\n tracker = pawprint_default_tracker_db_with_table\n\n tracker.write(event=\"whisky\", metadata={\"uigeadail\": {\"value\": 123, \"lagavulin\": [4, 2]}})\n tracker.write(event=\"whisky\", metadata={\"uigeadail\": {\"value\": 456, \"lagavulin\": [5, 0]}})\n tracker.write(event=\"whisky\", metadata={\"uigeadail\": {\"value\": 758, \"lagavulin\": [7, 10]}})\n tracker.write(event=\"armagnac\", metadata={\"age\": \"XO\"})\n tracker.write(event=\"armagnac\", metadata={\"age\": 15})\n\n assert len(tracker.read()) == 5\n assert len(tracker.read(metadata__uigeadail__contains=\"lagavulin\")) == 3\n assert len(tracker.read(metadata__uigeadail__value__gt=123)) == 2\n assert len(tracker.read(metadata__uigeadail__value__gte=123)) == 3\n\n whiskies = tracker.sum(\"metadata__uigeadail__value\")\n assert len(whiskies) == 1\n assert whiskies.iloc[0][\"sum\"] == 1337\n\n assert len(tracker.read(metadata__contains=\"age\")) == 2\n assert 
len(tracker.read(metadata__age=\"XO\")) == 1\n\n\ndef test_silent_write_errors():\n \"\"\"When a failure occurs in event write, it should fail silently.\"\"\"\n\n tracker = pawprint.Tracker(db=None, table=None)\n\n try:\n tracker.write(event=\"This will fail silently.\")\n except Exception:\n pytest.fail(\"Failed to fail silently.\")\n\n\ndef test_nonsilent_write_errors(error_logger):\n \"\"\"Test non-silent write errors that should output to the logger or raise exceptions.\"\"\"\n\n tracker = pawprint.Tracker(db=\"postgresql:///fail\", logger=error_logger)\n\n with pytest.raises(Exception):\n tracker.write()\n with pytest.raises(Exception):\n tracker.write(event=\"going_to_fail\")\n\n with open(\"pawprint.log\", mode=\"r\") as f:\n logs = f.readlines()\n print(logs[3])\n\n assert len(logs) == 6\n assert logs[0].startswith(\"pawprint: pawprint failed to write.\")\n assert \"Table: None. Query: INSERT INTO None () VALUES ();\" in logs[0]\n assert \"Query: INSERT INTO None (event) VALUES ('going_to_fail')\" in logs[3]\n\n os.remove(\"pawprint.log\")\n\n\ndef test_auto_timestamp(db_string):\n \"\"\"Ensure that timestamps are autopopulated correctly if not passed.\"\"\"\n\n # Define a schema where the timestamp doesn't automatically populate through the database\n schema = {\"event\": \"TEXT\", \"timestamp\": \"TIMESTAMP\"}\n\n # Put together two trackers, one that autopopulates the timestamp\n no_auto = pawprint.Tracker(db=db_string, table=\"no_auto\", auto_timestamp=False, schema=schema)\n auto = pawprint.Tracker(db=db_string, table=\"auto\", auto_timestamp=True, schema=schema)\n\n # Create clean tables\n no_auto.create_table()\n auto.create_table()\n\n # Write events with no timestamp\n no_auto.write(event=\"foo\")\n auto.write(event=\"bar\")\n\n assert len(no_auto.read()) == 1\n assert len(auto.read()) == 1\n\n assert len(no_auto.read().dropna()) == 0\n assert len(auto.read().dropna()) == 1\n\n # Drop tables at the end\n no_auto.drop_table()\n auto.drop_table()\n\n\ndef test_repr_and_str(pawprint_default_tracker_db):\n \"\"\"Test the __repr__ and __str__.\"\"\"\n tracker = pawprint_default_tracker_db\n expected_repr = \"pawprint.Tracker on table '{}' and database '{}'\".format(\n tracker.table, tracker.db\n )\n expected_str = \"pawprint Tracker object.\\ndb : {}\\ntable : {}\".format(tracker.db, tracker.table)\n assert tracker.__repr__() == expected_repr\n assert tracker.__str__() == expected_str\n\n\ndef test_malicious_strings(pawprint_default_tracker_db_with_table):\n \"\"\"Test that SQL injection strings are sanitized\"\"\"\n tracker = pawprint_default_tracker_db_with_table\n\n tracker.write(\n event=\"armageddon\",\n metadata={\n \"shady business\": {\n \"with\": \"the following string\",\n \"of sql\": \"50');INSERT INTO {table} (event, user_id) VALUES \"\n \"('you got pwnd', '50\".format(table=tracker.table),\n }\n },\n )\n assert len(tracker.read()) == 1\n\n tracker.write(\n event=\"armageddon\",\n metadata={\n \"more shady business\": {\n \"my shady sql\": \"' OR '1'='1;DROP TABLE {table};\".format(table=tracker.table)\n }\n },\n )\n assert len(tracker.read()) == 2\n\n tracker.write(\n event=\"' OR '1'='1;\",\n metadata={\"foo\": \"x'); DROP TABLE {table}; --\".format(table=tracker.table)},\n )\n assert len(tracker.read()) == 3\n\n\ndef test_escaping_from_quotes(pawprint_default_tracker_db_with_table):\n tracker = pawprint_default_tracker_db_with_table\n tracker.write(\n event=\"known crummy string\",\n metadata={\n \"foo\": {\n \"toState\": 
\"#/app/dealnotes/2345/FORPETE'S_SAKE,_LLC_Tenant_Rep_Lease_2\",\n \"fromState\": \"#/app/dealdetails/2345\",\n \"platform\": \"iOS App\",\n }\n },\n )\n assert len(tracker.read()) == 1\n",
"step-ids": [
8,
15,
18,
20,
21
]
}
|
[
8,
15,
18,
20,
21
] |
# Make an array of dictionaries. Each dictionary should have keys:
#
# lat: the latitude
# lon: the longitude
# name: the waypoint name
#
# Make up three entries of various values.
waypoints = [
    { 'lat': 0.69622, 'lon': 106.72888, 'name': 'Kepulauan Riau' },
    { 'lat': -6.20876, 'lon': 106.84559, 'name': 'Jakarta' },
    { 'lat': 1.29027, 'lon': 103.85196, 'name': 'Singapore' }
]
# Write a loop that prints out all the field values for all the waypoints
for waypoint in waypoints:
    print(waypoint['lat'], waypoint['lon'], waypoint['name'])
|
normal
|
{
"blob_id": "5eee3953193e0fc9f44b81059ce66997c22bc8f1",
"index": 6960,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor dict in waypoints:\n print(dict)\n",
"step-3": "waypoints = [{'lat': 106.72888}, {'lon': 0.69622}, {'name': 'Kepulauan Riau'}]\nfor dict in waypoints:\n print(dict)\n",
"step-4": "# Make an array of dictionaries. Each dictionary should have keys:\n#\n# lat: the latitude\n# lon: the longitude\n# name: the waypoint name\n#\n# Make up three entries of various values.\n\nwaypoints = [\n { 'lat': 106.72888 },\n { 'lon': 0.69622 },\n { 'name': 'Kepulauan Riau' }\n]\n\n# Write a loop that prints out all the field values for all the waypoints\nfor dict in waypoints:\n print(dict)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 22 20:21:16 2018
@author: Yijie
"""
#Q4:
#(1)
yours = ['Yale','MIT','Berkeley']
mine = ['Harvard','CAU','Stanford']
ours1 = mine + yours
ours2=[]
ours2.append(mine)
ours2.append(yours)
print(ours1)
print(ours2)
# Difference: the print-outs show that 'ours1' is a single flat list holding
# all six strings, while 'ours2' is a nested list whose two elements are the
# 'mine' and 'yours' lists themselves.
#(2) question: do you want to remove something?
yours[1]='Mich'
print(ours1)
print(ours2)
#ours1 stays unchanged while ours2 changed, because ours1 copied the element
#references when it was created, while ours2 still holds references to the
#original lists, so the mutation of 'yours' shows up inside it.
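
# a quick extra check of the same aliasing behaviour:
ours3 = list(mine)    # shallow copy: a new list holding the same references
mine.append('Oxford')
print(ours3)          # unchanged, the copy was taken before the append
print(ours2[0])       # shows 'Oxford', because ours2[0] is the 'mine' list itself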
|
normal
|
{
"blob_id": "bf65d4a4e066e3e06b888d4b9ed49e10e66b4e78",
"index": 8145,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nours2.append(mine)\nours2.append(yours)\nprint(ours1)\nprint(ours2)\n<mask token>\nprint(ours1)\nprint(ours2)\n",
"step-3": "<mask token>\nyours = ['Yale', 'MIT', 'Berkeley']\nmine = ['Harvard', 'CAU', 'Stanford']\nours1 = mine + yours\nours2 = []\nours2.append(mine)\nours2.append(yours)\nprint(ours1)\nprint(ours2)\nyours[1] = 'Mich'\nprint(ours1)\nprint(ours2)\n",
"step-4": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jan 22 20:21:16 2018\n\n@author: Yijie\n\"\"\"\n\n#Q4:\n#(1)\nyours = ['Yale','MIT','Berkeley']\nmine = ['Harvard','CAU','Stanford']\nours1 = mine + yours\nours2=[]\nours2.append(mine)\nours2.append(yours)\nprint(ours1)\nprint(ours2)\n# Difference:the print out results indicate that the list 'ours1' is having \n#the objects in 'yours' and 'mine' together, while 'ours2' has a dividing line\n# between 'yours' and 'mine'.\n\n#(2) question: do you want to remove something?\nyours[1]='Mich'\nprint(ours1)\nprint(ours2)\n#ours1 stays unchanged while ours2 changed because ours1 is a new list, while ours2 is adding ",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
#-*-coding:utf8 -*-
import os, sys, json
class settings(object):
filename = ''
config = {}
def __init__(self):
self.DEBUG = os.environ.get('RdsMonitor_DEBUG', 0)
def get_settings(self):
"""Parses the settings from redis-live.conf.
"""
# TODO: Consider YAML. Human writable, machine readable.
with open(self.filename) as fp:
try:
return json.load(fp)
except Exception, e:
if self.DEBUG:
print >>sys.stderr, 'get_settings exception:', e
return {}
def get_redis_servers(self):
if self.DEBUG:
print >>sys.stderr, "get_redis_servers config:%s"%self.config
return self.config.get("RedisServers", '')
def get_redis_stats_server(self):
if self.DEBUG:
print >>sys.stderr, "get_redis_stats_server config:%s"%self.config
return self.config.get("RedisStatsServer", '')
	def get_data_store_type(self):
		if self.DEBUG:
			print >>sys.stderr, "get_data_store_type config:%s"%self.config
		return self.config.get("DataStoreType", '')

	def get_sqlite_stats_store(self):
		if self.DEBUG:
			print >>sys.stderr, "get_sqlite_stats_store config:%s"%self.config
		return self.config.get("SqliteStatsStore", '')
|
normal
|
{
"blob_id": "2c960685eaa14861c1c5b3ddb38b366a3e0e8e86",
"index": 1339,
"step-1": "#!/usr/bin/evn python\n#-*-coding:utf8 -*-\n\n\nimport os, sys, json\n\nclass settings(object):\n\tfilename = ''\n\tconfig = {}\n\t\n\tdef __init__(self):\n\t\tself.DEBUG = os.environ.get('RdsMonitor_DEBUG', 0)\n\t\t\n\tdef get_settings(self):\n\t\t\"\"\"Parses the settings from redis-live.conf.\n\t\t\"\"\"\n\n\t\t# TODO: Consider YAML. Human writable, machine readable.\n\t\twith open(self.filename) as fp:\n\t\t\ttry:\n\t\t\t\treturn json.load(fp)\n\t\t\texcept Exception, e:\n\t\t\t\tif self.DEBUG:\n\t\t\t\t\tprint >>sys.stderr, 'get_settings exception:', e\n\t\t\t\treturn {}\n\n\tdef get_redis_servers(self):\t\t\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_servers config:%s\"%self.config\n\t\treturn self.config.get(\"RedisServers\", '')\n\t\n\t\n\tdef get_redis_stats_server(self):\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_stats_server config:%s\"%self.config\n\t\treturn self.config.get(\"RedisStatsServer\", '')\n\t\n\t\n\tdef get_data_store_type(self):\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_stats_server config:%s\"%self.config\n\t\treturn self.config.get(\"DataStoreType\", '')\n\t\n\t\n\tdef get_sqlite_stats_store(self):\n\t\tif self.DEBUG:\n\t\t\tprint >>sys.stderr, \"get_redis_stats_server config:%s\"%self.config\n\t\treturn self.config.get(\"SqliteStatsStore\", '')",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import torch
from torchelie.data_learning import *
def test_pixel_image():
pi = PixelImage((1, 3, 128, 128), 0.01)
pi()
start = torch.randn(3, 128, 128)
pi = PixelImage((1, 3, 128, 128), init_img=start)
assert start.allclose(pi() + 0.5, atol=1e-7)
def test_spectral_image():
pi = SpectralImage((1, 3, 128, 128), 0.01)
pi()
start = torch.randn(1, 3, 128, 128)
pi = SpectralImage((1, 3, 128, 128), init_img=start)
def test_correlate_colors():
corr = CorrelateColors()
start = torch.randn(1, 3, 64, 64)
assert start.allclose(corr.invert(corr(start)), atol=1e-5)
def test_parameterized_img():
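    # covers every combination of parameterization space ('pixel' vs
    # 'spectral') and color handling ('corr' vs 'uncorr'), each with and
    # without an init image, plus one non-square (128x129) buffer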
start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)
ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()
ParameterizedImg(1, 3,
128,
128,
space='spectral',
colors='uncorr',
init_img=start)()
ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()
start = torch.clamp(torch.randn(1, 3, 128, 129) + 0.5, min=0, max=1)
ParameterizedImg(1, 3,
128,
129,
space='spectral',
colors='uncorr',
init_img=start)()
start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)
ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr')()
ParameterizedImg(1, 3,
128,
128,
space='pixel',
colors='uncorr',
init_img=start)()
ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr')()
ParameterizedImg(1, 3,
128,
128,
space='spectral',
colors='corr',
init_img=start)()
ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr')()
ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr',
init_img=start)()
|
normal
|
{
"blob_id": "73cacc1317c8624b45c017144bc7449bc99bd045",
"index": 9542,
"step-1": "<mask token>\n\n\ndef test_pixel_image():\n pi = PixelImage((1, 3, 128, 128), 0.01)\n pi()\n start = torch.randn(3, 128, 128)\n pi = PixelImage((1, 3, 128, 128), init_img=start)\n assert start.allclose(pi() + 0.5, atol=1e-07)\n\n\n<mask token>\n\n\ndef test_parameterized_img():\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n start = torch.clamp(torch.randn(1, 3, 128, 129) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 129, space='spectral', colors='uncorr',\n init_img=start)()\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr', init_img\n =start)()\n",
"step-2": "<mask token>\n\n\ndef test_pixel_image():\n pi = PixelImage((1, 3, 128, 128), 0.01)\n pi()\n start = torch.randn(3, 128, 128)\n pi = PixelImage((1, 3, 128, 128), init_img=start)\n assert start.allclose(pi() + 0.5, atol=1e-07)\n\n\ndef test_spectral_image():\n pi = SpectralImage((1, 3, 128, 128), 0.01)\n pi()\n start = torch.randn(1, 3, 128, 128)\n pi = SpectralImage((1, 3, 128, 128), init_img=start)\n\n\n<mask token>\n\n\ndef test_parameterized_img():\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n start = torch.clamp(torch.randn(1, 3, 128, 129) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 129, space='spectral', colors='uncorr',\n init_img=start)()\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr', init_img\n =start)()\n",
"step-3": "<mask token>\n\n\ndef test_pixel_image():\n pi = PixelImage((1, 3, 128, 128), 0.01)\n pi()\n start = torch.randn(3, 128, 128)\n pi = PixelImage((1, 3, 128, 128), init_img=start)\n assert start.allclose(pi() + 0.5, atol=1e-07)\n\n\ndef test_spectral_image():\n pi = SpectralImage((1, 3, 128, 128), 0.01)\n pi()\n start = torch.randn(1, 3, 128, 128)\n pi = SpectralImage((1, 3, 128, 128), init_img=start)\n\n\ndef test_correlate_colors():\n corr = CorrelateColors()\n start = torch.randn(1, 3, 64, 64)\n assert start.allclose(corr.invert(corr(start)), atol=1e-05)\n\n\ndef test_parameterized_img():\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n start = torch.clamp(torch.randn(1, 3, 128, 129) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 129, space='spectral', colors='uncorr',\n init_img=start)()\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr', init_img\n =start)()\n",
"step-4": "import torch\nfrom torchelie.data_learning import *\n\n\ndef test_pixel_image():\n pi = PixelImage((1, 3, 128, 128), 0.01)\n pi()\n start = torch.randn(3, 128, 128)\n pi = PixelImage((1, 3, 128, 128), init_img=start)\n assert start.allclose(pi() + 0.5, atol=1e-07)\n\n\ndef test_spectral_image():\n pi = SpectralImage((1, 3, 128, 128), 0.01)\n pi()\n start = torch.randn(1, 3, 128, 128)\n pi = SpectralImage((1, 3, 128, 128), init_img=start)\n\n\ndef test_correlate_colors():\n corr = CorrelateColors()\n start = torch.randn(1, 3, 64, 64)\n assert start.allclose(corr.invert(corr(start)), atol=1e-05)\n\n\ndef test_parameterized_img():\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n start = torch.clamp(torch.randn(1, 3, 128, 129) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 129, space='spectral', colors='uncorr',\n init_img=start)()\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr',\n init_img=start)()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr', init_img\n =start)()\n",
"step-5": "import torch\nfrom torchelie.data_learning import *\n\n\ndef test_pixel_image():\n pi = PixelImage((1, 3, 128, 128), 0.01)\n pi()\n\n start = torch.randn(3, 128, 128)\n pi = PixelImage((1, 3, 128, 128), init_img=start)\n\n assert start.allclose(pi() + 0.5, atol=1e-7)\n\n\ndef test_spectral_image():\n pi = SpectralImage((1, 3, 128, 128), 0.01)\n pi()\n\n start = torch.randn(1, 3, 128, 128)\n pi = SpectralImage((1, 3, 128, 128), init_img=start)\n\n\ndef test_correlate_colors():\n corr = CorrelateColors()\n start = torch.randn(1, 3, 64, 64)\n assert start.allclose(corr.invert(corr(start)), atol=1e-5)\n\n\ndef test_parameterized_img():\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n ParameterizedImg(1, 3,\n 128,\n 128,\n space='spectral',\n colors='uncorr',\n init_img=start)()\n\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='uncorr')()\n\n start = torch.clamp(torch.randn(1, 3, 128, 129) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3,\n 128,\n 129,\n space='spectral',\n colors='uncorr',\n init_img=start)()\n start = torch.clamp(torch.randn(1, 3, 128, 128) + 0.5, min=0, max=1)\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='uncorr')()\n ParameterizedImg(1, 3,\n 128,\n 128,\n space='pixel',\n colors='uncorr',\n init_img=start)()\n\n ParameterizedImg(1, 3, 128, 128, space='spectral', colors='corr')()\n ParameterizedImg(1, 3,\n 128,\n 128,\n space='spectral',\n colors='corr',\n init_img=start)()\n\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr')()\n ParameterizedImg(1, 3, 128, 128, space='pixel', colors='corr',\n init_img=start)()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
def opstr(op):
if op == 'RSHIFT': return '>>'
if op == 'LSHIFT': return '<<'
if op == 'OR': return '|'
if op == 'AND': return '&'
if op == 'NOT': return '~'
raise RuntimeError('Unknown {0}'.format(op))
def funstr(fun):
return '{0}_fn'.format(fun)
def topython(line):
line.strip()
body, result = line.split('->')
fun_name = funstr(result.strip())
fun_body = '''
def {name}():
result = precalc.get(\'{name}\')
if result is None:
result = {calc}
precalc[\'{name}\'] = result
return result
'''
calc = str()
for part in body.strip().split(' '):
if part.isupper():
calc += opstr(part) + ' '
elif part.islower():
calc += funstr(part) + '() '
else:
calc += part + ' '
return fun_body.format(name=fun_name, calc=calc)
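
# example (illustrative): the input line "x AND y -> z" compiles to a function
# z_fn() that returns x_fn() & y_fn(), memoised through the shared precalc dict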
with open('in.txt') as input, open('out.py', 'w+') as output:
output.write('precalc = dict()')
for line in input:
output.write(topython(line) + '\n')
output.write('print(a_fn())\n')
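
# importing the generated module executes it, which prints the value of wire 'a'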
import out
|
normal
|
{
"blob_id": "e68588dff0e54fa03dbb1c629c39d8312a0df26d",
"index": 7230,
"step-1": "<mask token>\n\n\ndef opstr(op):\n if op == 'RSHIFT':\n return '>>'\n if op == 'LSHIFT':\n return '<<'\n if op == 'OR':\n return '|'\n if op == 'AND':\n return '&'\n if op == 'NOT':\n return '~'\n raise RuntimeError('Unknown {0}'.format(op))\n\n\ndef funstr(fun):\n return '{0}_fn'.format(fun)\n\n\ndef topython(line):\n line.strip()\n body, result = line.split('->')\n fun_name = funstr(result.strip())\n fun_body = \"\"\"\ndef {name}():\n result = precalc.get('{name}')\n if result is None:\n result = {calc}\n precalc['{name}'] = result\n return result\n\"\"\"\n calc = str()\n for part in body.strip().split(' '):\n if part.isupper():\n calc += opstr(part) + ' '\n elif part.islower():\n calc += funstr(part) + '() '\n else:\n calc += part + ' '\n return fun_body.format(name=fun_name, calc=calc)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef opstr(op):\n if op == 'RSHIFT':\n return '>>'\n if op == 'LSHIFT':\n return '<<'\n if op == 'OR':\n return '|'\n if op == 'AND':\n return '&'\n if op == 'NOT':\n return '~'\n raise RuntimeError('Unknown {0}'.format(op))\n\n\ndef funstr(fun):\n return '{0}_fn'.format(fun)\n\n\ndef topython(line):\n line.strip()\n body, result = line.split('->')\n fun_name = funstr(result.strip())\n fun_body = \"\"\"\ndef {name}():\n result = precalc.get('{name}')\n if result is None:\n result = {calc}\n precalc['{name}'] = result\n return result\n\"\"\"\n calc = str()\n for part in body.strip().split(' '):\n if part.isupper():\n calc += opstr(part) + ' '\n elif part.islower():\n calc += funstr(part) + '() '\n else:\n calc += part + ' '\n return fun_body.format(name=fun_name, calc=calc)\n\n\nwith open('in.txt') as input, open('out.py', 'w+') as output:\n output.write('precalc = dict()')\n for line in input:\n output.write(topython(line) + '\\n')\n output.write('print(a_fn())\\n')\n<mask token>\n",
"step-3": "input = open('in.txt')\noutput = open('out.py', 'w+')\n\n\ndef opstr(op):\n if op == 'RSHIFT':\n return '>>'\n if op == 'LSHIFT':\n return '<<'\n if op == 'OR':\n return '|'\n if op == 'AND':\n return '&'\n if op == 'NOT':\n return '~'\n raise RuntimeError('Unknown {0}'.format(op))\n\n\ndef funstr(fun):\n return '{0}_fn'.format(fun)\n\n\ndef topython(line):\n line.strip()\n body, result = line.split('->')\n fun_name = funstr(result.strip())\n fun_body = \"\"\"\ndef {name}():\n result = precalc.get('{name}')\n if result is None:\n result = {calc}\n precalc['{name}'] = result\n return result\n\"\"\"\n calc = str()\n for part in body.strip().split(' '):\n if part.isupper():\n calc += opstr(part) + ' '\n elif part.islower():\n calc += funstr(part) + '() '\n else:\n calc += part + ' '\n return fun_body.format(name=fun_name, calc=calc)\n\n\nwith open('in.txt') as input, open('out.py', 'w+') as output:\n output.write('precalc = dict()')\n for line in input:\n output.write(topython(line) + '\\n')\n output.write('print(a_fn())\\n')\n<mask token>\n",
"step-4": "input = open('in.txt')\noutput = open('out.py', 'w+')\n\n\ndef opstr(op):\n if op == 'RSHIFT':\n return '>>'\n if op == 'LSHIFT':\n return '<<'\n if op == 'OR':\n return '|'\n if op == 'AND':\n return '&'\n if op == 'NOT':\n return '~'\n raise RuntimeError('Unknown {0}'.format(op))\n\n\ndef funstr(fun):\n return '{0}_fn'.format(fun)\n\n\ndef topython(line):\n line.strip()\n body, result = line.split('->')\n fun_name = funstr(result.strip())\n fun_body = \"\"\"\ndef {name}():\n result = precalc.get('{name}')\n if result is None:\n result = {calc}\n precalc['{name}'] = result\n return result\n\"\"\"\n calc = str()\n for part in body.strip().split(' '):\n if part.isupper():\n calc += opstr(part) + ' '\n elif part.islower():\n calc += funstr(part) + '() '\n else:\n calc += part + ' '\n return fun_body.format(name=fun_name, calc=calc)\n\n\nwith open('in.txt') as input, open('out.py', 'w+') as output:\n output.write('precalc = dict()')\n for line in input:\n output.write(topython(line) + '\\n')\n output.write('print(a_fn())\\n')\nimport out\n",
"step-5": "input = open('in.txt')\noutput = open('out.py', 'w+')\n\ndef opstr(op):\n if op == 'RSHIFT': return '>>'\n if op == 'LSHIFT': return '<<'\n if op == 'OR': return '|'\n if op == 'AND': return '&'\n if op == 'NOT': return '~'\n raise RuntimeError('Unknown {0}'.format(op))\n\n\ndef funstr(fun):\n return '{0}_fn'.format(fun)\n\n\ndef topython(line):\n line.strip()\n body, result = line.split('->')\n fun_name = funstr(result.strip())\n fun_body = '''\ndef {name}():\n result = precalc.get(\\'{name}\\')\n if result is None:\n result = {calc}\n precalc[\\'{name}\\'] = result\n return result\n'''\n calc = str()\n for part in body.strip().split(' '):\n if part.isupper():\n calc += opstr(part) + ' '\n elif part.islower():\n calc += funstr(part) + '() '\n else:\n calc += part + ' '\n \n return fun_body.format(name=fun_name, calc=calc) \n\nwith open('in.txt') as input, open('out.py', 'w+') as output:\n output.write('precalc = dict()')\n for line in input:\n output.write(topython(line) + '\\n')\n\n output.write('print(a_fn())\\n')\n\nimport out\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
# created by Angus Clark 9/2/17 updated 27/2/17
# ToDo implement traceroute function into this
# Perhaps get rid of unnecessary intermediate temp file
import socket
import os
import json
import my_traceroute
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = '130.56.253.43'
#print host
port = 5201 # Change port (must enable security settings of server)
s.bind((host,port))
s.listen(5)
MAX_HOPS = 30 # max hops for traceroute
while True:
c, addr = s.accept() #accept incoming Connection
f = open('temp.json','wb') # open blank binary to dump incoming data
#print addr[0]
l = c.recv(1024)
while(l):
# Dump data into temp file and get next chunk of data
f.write(l)
l = c.recv(1024)
f.close()
c.close()
tempfile = open('temp.json','rb')
info = json.load(tempfile)
info["UserInfo"]["ip"] = addr[0] # store ip address of sender
last_addr = '0.0.0.0' # placeholder for first iteration
for hop in range(1,MAX_HOPS):
result = my_traceroute.traceroute(hop, info["UserInfo"]["ip"])
#print result
if result == -1:
break
if result[1] == last_addr:
break
info["TRACEROUTE"][str(result[0])] = {}
info["TRACEROUTE"][str(result[0])].update({'node':result[1], 'rtt':result[2]})
last_addr = result[1]
id = info["UserInfo"]["user id"]
timestamp = info["UserInfo"]["timestamp"]
os.system('mkdir /home/ubuntu/data/'+id)
path = "/home/ubuntu/data/" + id + "/"
filename = timestamp + '.json'
savefile = open(path + filename, 'w+')
savefile.write(json.dumps(info))
savefile.close()
|
normal
|
{
"blob_id": "792f62c72f1667f651567314b062d862abbc9aa5",
"index": 6692,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ns.bind((host, port))\ns.listen(5)\n<mask token>\nwhile True:\n c, addr = s.accept()\n f = open('temp.json', 'wb')\n l = c.recv(1024)\n while l:\n f.write(l)\n l = c.recv(1024)\n f.close()\n c.close()\n tempfile = open('temp.json', 'rb')\n info = json.load(tempfile)\n info['UserInfo']['ip'] = addr[0]\n last_addr = '0.0.0.0'\n for hop in range(1, MAX_HOPS):\n result = my_traceroute.traceroute(hop, info['UserInfo']['ip'])\n if result == -1:\n break\n if result[1] == last_addr:\n break\n info['TRACEROUTE'][str(result[0])] = {}\n info['TRACEROUTE'][str(result[0])].update({'node': result[1], 'rtt':\n result[2]})\n last_addr = result[1]\n id = info['UserInfo']['user id']\n timestamp = info['UserInfo']['timestamp']\n os.system('mkdir /home/ubuntu/data/' + id)\n path = '/home/ubuntu/data/' + id + '/'\n filename = timestamp + '.json'\n savefile = open(path + filename, 'w+')\n savefile.write(json.dumps(info))\n savefile.close()\n",
"step-3": "<mask token>\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nhost = '130.56.253.43'\nport = 5201\ns.bind((host, port))\ns.listen(5)\nMAX_HOPS = 30\nwhile True:\n c, addr = s.accept()\n f = open('temp.json', 'wb')\n l = c.recv(1024)\n while l:\n f.write(l)\n l = c.recv(1024)\n f.close()\n c.close()\n tempfile = open('temp.json', 'rb')\n info = json.load(tempfile)\n info['UserInfo']['ip'] = addr[0]\n last_addr = '0.0.0.0'\n for hop in range(1, MAX_HOPS):\n result = my_traceroute.traceroute(hop, info['UserInfo']['ip'])\n if result == -1:\n break\n if result[1] == last_addr:\n break\n info['TRACEROUTE'][str(result[0])] = {}\n info['TRACEROUTE'][str(result[0])].update({'node': result[1], 'rtt':\n result[2]})\n last_addr = result[1]\n id = info['UserInfo']['user id']\n timestamp = info['UserInfo']['timestamp']\n os.system('mkdir /home/ubuntu/data/' + id)\n path = '/home/ubuntu/data/' + id + '/'\n filename = timestamp + '.json'\n savefile = open(path + filename, 'w+')\n savefile.write(json.dumps(info))\n savefile.close()\n",
"step-4": "import socket\nimport os\nimport json\nimport my_traceroute\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nhost = '130.56.253.43'\nport = 5201\ns.bind((host, port))\ns.listen(5)\nMAX_HOPS = 30\nwhile True:\n c, addr = s.accept()\n f = open('temp.json', 'wb')\n l = c.recv(1024)\n while l:\n f.write(l)\n l = c.recv(1024)\n f.close()\n c.close()\n tempfile = open('temp.json', 'rb')\n info = json.load(tempfile)\n info['UserInfo']['ip'] = addr[0]\n last_addr = '0.0.0.0'\n for hop in range(1, MAX_HOPS):\n result = my_traceroute.traceroute(hop, info['UserInfo']['ip'])\n if result == -1:\n break\n if result[1] == last_addr:\n break\n info['TRACEROUTE'][str(result[0])] = {}\n info['TRACEROUTE'][str(result[0])].update({'node': result[1], 'rtt':\n result[2]})\n last_addr = result[1]\n id = info['UserInfo']['user id']\n timestamp = info['UserInfo']['timestamp']\n os.system('mkdir /home/ubuntu/data/' + id)\n path = '/home/ubuntu/data/' + id + '/'\n filename = timestamp + '.json'\n savefile = open(path + filename, 'w+')\n savefile.write(json.dumps(info))\n savefile.close()\n",
"step-5": "# created by Angus Clark 9/2/17 updated 27/2/17\n# ToDo impliment traceroute function into this \n# Perhaps get rid of unnecessary itemediate temp file\n\nimport socket\nimport os\nimport json\nimport my_traceroute\n\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM) \nhost = '130.56.253.43'\n#print host\nport = 5201 # Change port (must enable security settigns of server)\ns.bind((host,port))\ns.listen(5)\nMAX_HOPS = 30 # max hops for traceroute\n\nwhile True:\n c, addr = s.accept() #accept incoming Connection\n f = open('temp.json','wb') # open blank binary to dump incoming data\n #print addr[0]\n l = c.recv(1024)\n while(l):\n # Dump data into temp file and get next chunk of data\n f.write(l)\n l = c.recv(1024)\n f.close()\n c.close()\n \n tempfile = open('temp.json','rb')\n info = json.load(tempfile)\n info[\"UserInfo\"][\"ip\"] = addr[0] # store ip address of sender\n \n last_addr = '0.0.0.0' # placeholder for first iteration\n for hop in range(1,MAX_HOPS):\n result = my_traceroute.traceroute(hop, info[\"UserInfo\"][\"ip\"])\n #print result\n if result == -1:\n break\n if result[1] == last_addr:\n break\n info[\"TRACEROUTE\"][str(result[0])] = {}\n info[\"TRACEROUTE\"][str(result[0])].update({'node':result[1], 'rtt':result[2]})\n last_addr = result[1]\n \n \n id = info[\"UserInfo\"][\"user id\"]\n timestamp = info[\"UserInfo\"][\"timestamp\"]\n \n os.system('mkdir /home/ubuntu/data/'+id)\n path = \"/home/ubuntu/data/\" + id + \"/\"\n filename = timestamp + '.json'\n \n savefile = open(path + filename, 'w+')\n savefile.write(json.dumps(info))\n savefile.close()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
#v0.2
import random, time
mapHeight = 30
mapWidth = 30
fillPercent = 45
def generateNoise():
    #generate a grid of cells (height = mapHeight, width = mapWidth) where each cell is either a wall (1) or a floor (0)
    #the border is guaranteed to be walls; every other cell has a fillPercent chance of being a wall
caveMap = []
column = 1
row = 1
while column <= mapWidth:
while row <= mapHeight:
if (column == 1) or (column == mapWidth) or (row == 1) or (row == mapHeight):
caveMap.append([column, row, 1])
else:
if random.randrange(1,100) <= fillPercent:
caveMap.append([column, row, 1])
else:
caveMap.append([column,row,0])
row += 1
column += 1
row = 1
printCaveMap(caveMap)
return caveMap
def isOutOfBounds(column, row):
#find if a cell is out of bounds based on map size
if column < 1 or row < 1:
return True
elif column > mapWidth or row > mapHeight:
return True
else:
return False
def isWall(caveMap, column, row):
#determine if a cell is a wall or not
#very inefficient - might have to loop through entire list
for cell in caveMap:
if cell[0] == column and cell[1] == row and cell[2] == 1:
return True
elif cell[0] == column and cell[1] == row and cell[2] == 0:
return False
else:
continue
def findNeighbors(caveMap, column, row):
#find the number of walls in a 3x3 pattern around a given cell (determined by column and row)
#there must be a more efficient way to do this, but here we are
neighbors = 0
if isOutOfBounds(column -1, row -1):
neighbors += 1
elif isWall(caveMap, column -1, row -1):
neighbors += 1
if isOutOfBounds(column, row -1):
neighbors += 1
elif isWall(caveMap, column, row -1):
neighbors += 1
if isOutOfBounds(column +1, row -1):
neighbors += 1
elif isWall(caveMap, column +1, row -1):
neighbors += 1
if isOutOfBounds(column -1, row):
neighbors += 1
elif isWall(caveMap, column -1, row):
neighbors += 1
if isOutOfBounds(column +1, row):
neighbors += 1
elif isWall(caveMap, column +1, row):
neighbors += 1
if isOutOfBounds(column -1, row +1):
neighbors += 1
elif isWall(caveMap, column -1, row +1):
neighbors += 1
if isOutOfBounds(column, row +1):
neighbors += 1
elif isWall(caveMap, column, row +1):
neighbors += 1
if isOutOfBounds(column +1, row +1):
neighbors += 1
elif isWall(caveMap, column +1, row +1):
neighbors += 1
return neighbors
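# A compact variant (sketch only, not called above): count the eight
# neighbors by looping over offsets instead of spelling out each case.
# It reuses the isOutOfBounds/isWall helpers and should match
# findNeighbors exactly: out-of-bounds cells count as walls.
def findNeighborsCompact(caveMap, column, row):
    neighbors = 0
    for dc in (-1, 0, 1):
        for dr in (-1, 0, 1):
            if dc == 0 and dr == 0:
                continue # skip the centre cell itself
            if isOutOfBounds(column + dc, row + dr) or isWall(caveMap, column + dc, row + dr):
                neighbors += 1
    return neighbors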
def runGeneration(caveMap, generations):
    #smooth out random noise using modified 4-5 cellular automata rules
    #the entire process is pretty inefficient - it has to loop through the entire list as many as
    #(mapWidth * mapHeight * 8) times for potentially millions of comparisons
    i = 0
for i in range(0, generations):
start_time = time.time()
for cell in caveMap:
if findNeighbors(caveMap,cell[0],cell[1]) < 3:
cell[2] = 0
elif findNeighbors(caveMap, cell[0], cell[1]) > 5:
cell[2] = 1
printCaveMap(caveMap)
end_time = time.time()
print(end_time - start_time, " seconds")
return caveMap
def printCaveMap(caveMap):
#print the map by displaying a grid of characters where # = walls and spaces = floors
#just uses mapWidth to insert returns, very agnostic about the column/row of a cell
i = 1
for item in caveMap:
if i == mapWidth + 1:
print('\r')
i = 1
if item[2] == 1:
print(" # ", end="")
else:
print(" ", end="")
i += 1
print("\n", "\n")
def main():
caveMap = generateNoise()
runGeneration(caveMap, 2)
if __name__ == "__main__":
main()
|
normal
|
{
"blob_id": "7feac838f17ef1e4338190c0e8c284ed99369693",
"index": 1628,
"step-1": "<mask token>\n\n\ndef generateNoise():\n caveMap = []\n column = 1\n row = 1\n while column <= mapWidth:\n while row <= mapHeight:\n if (column == 1 or column == mapWidth or row == 1 or row ==\n mapHeight):\n caveMap.append([column, row, 1])\n elif random.randrange(1, 100) <= fillPercent:\n caveMap.append([column, row, 1])\n else:\n caveMap.append([column, row, 0])\n row += 1\n column += 1\n row = 1\n printCaveMap(caveMap)\n return caveMap\n\n\n<mask token>\n\n\ndef isWall(caveMap, column, row):\n for cell in caveMap:\n if cell[0] == column and cell[1] == row and cell[2] == 1:\n return True\n elif cell[0] == column and cell[1] == row and cell[2] == 0:\n return False\n else:\n continue\n\n\ndef findNeighbors(caveMap, column, row):\n neighbors = 0\n if isOutOfBounds(column - 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column, row - 1):\n neighbors += 1\n elif isWall(caveMap, column, row - 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column - 1, row):\n neighbors += 1\n elif isWall(caveMap, column - 1, row):\n neighbors += 1\n if isOutOfBounds(column + 1, row):\n neighbors += 1\n elif isWall(caveMap, column + 1, row):\n neighbors += 1\n if isOutOfBounds(column - 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row + 1):\n neighbors += 1\n if isOutOfBounds(column, row + 1):\n neighbors += 1\n elif isWall(caveMap, column, row + 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row + 1):\n neighbors += 1\n return neighbors\n\n\ndef runGeneration(caveMap, generations):\n i = 0\n for i in range(0, generations):\n start_time = time.time()\n for cell in caveMap:\n if findNeighbors(caveMap, cell[0], cell[1]) < 3:\n cell[2] = 0\n elif findNeighbors(caveMap, cell[0], cell[1]) > 5:\n cell[2] = 1\n printCaveMap(caveMap)\n end_time = time.time()\n print(end_time - start_time, ' seconds')\n return caveMap\n\n\n<mask token>\n\n\ndef main():\n caveMap = generateNoise()\n runGeneration(caveMap, 2)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef generateNoise():\n caveMap = []\n column = 1\n row = 1\n while column <= mapWidth:\n while row <= mapHeight:\n if (column == 1 or column == mapWidth or row == 1 or row ==\n mapHeight):\n caveMap.append([column, row, 1])\n elif random.randrange(1, 100) <= fillPercent:\n caveMap.append([column, row, 1])\n else:\n caveMap.append([column, row, 0])\n row += 1\n column += 1\n row = 1\n printCaveMap(caveMap)\n return caveMap\n\n\n<mask token>\n\n\ndef isWall(caveMap, column, row):\n for cell in caveMap:\n if cell[0] == column and cell[1] == row and cell[2] == 1:\n return True\n elif cell[0] == column and cell[1] == row and cell[2] == 0:\n return False\n else:\n continue\n\n\ndef findNeighbors(caveMap, column, row):\n neighbors = 0\n if isOutOfBounds(column - 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column, row - 1):\n neighbors += 1\n elif isWall(caveMap, column, row - 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column - 1, row):\n neighbors += 1\n elif isWall(caveMap, column - 1, row):\n neighbors += 1\n if isOutOfBounds(column + 1, row):\n neighbors += 1\n elif isWall(caveMap, column + 1, row):\n neighbors += 1\n if isOutOfBounds(column - 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row + 1):\n neighbors += 1\n if isOutOfBounds(column, row + 1):\n neighbors += 1\n elif isWall(caveMap, column, row + 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row + 1):\n neighbors += 1\n return neighbors\n\n\ndef runGeneration(caveMap, generations):\n i = 0\n for i in range(0, generations):\n start_time = time.time()\n for cell in caveMap:\n if findNeighbors(caveMap, cell[0], cell[1]) < 3:\n cell[2] = 0\n elif findNeighbors(caveMap, cell[0], cell[1]) > 5:\n cell[2] = 1\n printCaveMap(caveMap)\n end_time = time.time()\n print(end_time - start_time, ' seconds')\n return caveMap\n\n\ndef printCaveMap(caveMap):\n i = 1\n for item in caveMap:\n if i == mapWidth + 1:\n print('\\r')\n i = 1\n if item[2] == 1:\n print(' # ', end='')\n else:\n print(' ', end='')\n i += 1\n print('\\n', '\\n')\n\n\ndef main():\n caveMap = generateNoise()\n runGeneration(caveMap, 2)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef generateNoise():\n caveMap = []\n column = 1\n row = 1\n while column <= mapWidth:\n while row <= mapHeight:\n if (column == 1 or column == mapWidth or row == 1 or row ==\n mapHeight):\n caveMap.append([column, row, 1])\n elif random.randrange(1, 100) <= fillPercent:\n caveMap.append([column, row, 1])\n else:\n caveMap.append([column, row, 0])\n row += 1\n column += 1\n row = 1\n printCaveMap(caveMap)\n return caveMap\n\n\ndef isOutOfBounds(column, row):\n if column < 1 or row < 1:\n return True\n elif column > mapWidth or row > mapHeight:\n return True\n else:\n return False\n\n\ndef isWall(caveMap, column, row):\n for cell in caveMap:\n if cell[0] == column and cell[1] == row and cell[2] == 1:\n return True\n elif cell[0] == column and cell[1] == row and cell[2] == 0:\n return False\n else:\n continue\n\n\ndef findNeighbors(caveMap, column, row):\n neighbors = 0\n if isOutOfBounds(column - 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column, row - 1):\n neighbors += 1\n elif isWall(caveMap, column, row - 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column - 1, row):\n neighbors += 1\n elif isWall(caveMap, column - 1, row):\n neighbors += 1\n if isOutOfBounds(column + 1, row):\n neighbors += 1\n elif isWall(caveMap, column + 1, row):\n neighbors += 1\n if isOutOfBounds(column - 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row + 1):\n neighbors += 1\n if isOutOfBounds(column, row + 1):\n neighbors += 1\n elif isWall(caveMap, column, row + 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row + 1):\n neighbors += 1\n return neighbors\n\n\ndef runGeneration(caveMap, generations):\n i = 0\n for i in range(0, generations):\n start_time = time.time()\n for cell in caveMap:\n if findNeighbors(caveMap, cell[0], cell[1]) < 3:\n cell[2] = 0\n elif findNeighbors(caveMap, cell[0], cell[1]) > 5:\n cell[2] = 1\n printCaveMap(caveMap)\n end_time = time.time()\n print(end_time - start_time, ' seconds')\n return caveMap\n\n\ndef printCaveMap(caveMap):\n i = 1\n for item in caveMap:\n if i == mapWidth + 1:\n print('\\r')\n i = 1\n if item[2] == 1:\n print(' # ', end='')\n else:\n print(' ', end='')\n i += 1\n print('\\n', '\\n')\n\n\ndef main():\n caveMap = generateNoise()\n runGeneration(caveMap, 2)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nmapHeight = 30\nmapWidth = 30\nfillPercent = 45\n\n\ndef generateNoise():\n caveMap = []\n column = 1\n row = 1\n while column <= mapWidth:\n while row <= mapHeight:\n if (column == 1 or column == mapWidth or row == 1 or row ==\n mapHeight):\n caveMap.append([column, row, 1])\n elif random.randrange(1, 100) <= fillPercent:\n caveMap.append([column, row, 1])\n else:\n caveMap.append([column, row, 0])\n row += 1\n column += 1\n row = 1\n printCaveMap(caveMap)\n return caveMap\n\n\ndef isOutOfBounds(column, row):\n if column < 1 or row < 1:\n return True\n elif column > mapWidth or row > mapHeight:\n return True\n else:\n return False\n\n\ndef isWall(caveMap, column, row):\n for cell in caveMap:\n if cell[0] == column and cell[1] == row and cell[2] == 1:\n return True\n elif cell[0] == column and cell[1] == row and cell[2] == 0:\n return False\n else:\n continue\n\n\ndef findNeighbors(caveMap, column, row):\n neighbors = 0\n if isOutOfBounds(column - 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column, row - 1):\n neighbors += 1\n elif isWall(caveMap, column, row - 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row - 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row - 1):\n neighbors += 1\n if isOutOfBounds(column - 1, row):\n neighbors += 1\n elif isWall(caveMap, column - 1, row):\n neighbors += 1\n if isOutOfBounds(column + 1, row):\n neighbors += 1\n elif isWall(caveMap, column + 1, row):\n neighbors += 1\n if isOutOfBounds(column - 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column - 1, row + 1):\n neighbors += 1\n if isOutOfBounds(column, row + 1):\n neighbors += 1\n elif isWall(caveMap, column, row + 1):\n neighbors += 1\n if isOutOfBounds(column + 1, row + 1):\n neighbors += 1\n elif isWall(caveMap, column + 1, row + 1):\n neighbors += 1\n return neighbors\n\n\ndef runGeneration(caveMap, generations):\n i = 0\n for i in range(0, generations):\n start_time = time.time()\n for cell in caveMap:\n if findNeighbors(caveMap, cell[0], cell[1]) < 3:\n cell[2] = 0\n elif findNeighbors(caveMap, cell[0], cell[1]) > 5:\n cell[2] = 1\n printCaveMap(caveMap)\n end_time = time.time()\n print(end_time - start_time, ' seconds')\n return caveMap\n\n\ndef printCaveMap(caveMap):\n i = 1\n for item in caveMap:\n if i == mapWidth + 1:\n print('\\r')\n i = 1\n if item[2] == 1:\n print(' # ', end='')\n else:\n print(' ', end='')\n i += 1\n print('\\n', '\\n')\n\n\ndef main():\n caveMap = generateNoise()\n runGeneration(caveMap, 2)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#/usr/bin/env python\r\n#v0.2\r\nimport random, time\r\n\r\nmapHeight = 30\r\nmapWidth = 30\r\nfillPercent = 45\r\n\r\ndef generateNoise():\r\n\t#generate a grid of cells with height = mapHeight and width = mapWidth with each cell either \"walls\" (true) or \"floors\" (false)\r\n\t#border is guaranteed to be walls and all other spaces have a fillPercent chance of being walls\r\n\tcaveMap = []\r\n\tcolumn = 1\r\n\trow = 1\r\n\t\r\n\twhile column <= mapWidth:\r\n\t\twhile row <= mapHeight:\r\n\t\t\tif (column == 1) or (column == mapWidth) or (row == 1) or (row == mapHeight):\r\n\t\t\t\tcaveMap.append([column, row, 1])\r\n\t\t\telse:\r\n\t\t\t\tif random.randrange(1,100) <= fillPercent:\r\n\t\t\t\t\tcaveMap.append([column, row, 1])\r\n\t\t\t\telse:\r\n\t\t\t\t\tcaveMap.append([column,row,0])\r\n\t\t\trow += 1\r\n\t\tcolumn += 1\r\n\t\trow = 1\r\n\t\t\r\n\tprintCaveMap(caveMap)\t\t\r\n\treturn caveMap\r\n\r\ndef isOutOfBounds(column, row):\r\n\t#find if a cell is out of bounds based on map size\r\n\t\r\n\tif column < 1 or row < 1:\r\n\t\treturn True\r\n\telif column > mapWidth or row > mapHeight:\r\n\t\treturn True\r\n\telse:\r\n\t\treturn False\r\n\r\ndef isWall(caveMap, column, row):\r\n\t#determine if a cell is a wall or not\r\n\t#very inefficient - might have to loop through entire list\r\n\r\n\tfor cell in caveMap:\r\n\t\tif cell[0] == column and cell[1] == row and cell[2] == 1:\r\n\t\t\treturn True\r\n\t\telif cell[0] == column and cell[1] == row and cell[2] == 0:\r\n\t\t\treturn False\r\n\t\telse:\r\n\t\t\tcontinue\r\n\t\r\ndef findNeighbors(caveMap, column, row):\r\n\t#find the number of walls in a 3x3 pattern around a given cell (determined by column and row)\r\n\t#there must be a more efficient way to do this, but here we are\r\n\r\n\tneighbors = 0\r\n\r\n\tif isOutOfBounds(column -1, row -1):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column -1, row -1):\r\n\t\tneighbors += 1\r\n\t\t\r\n\tif isOutOfBounds(column, row -1):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column, row -1):\r\n\t\tneighbors += 1\r\n\t\t\r\n\tif isOutOfBounds(column +1, row -1):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column +1, row -1):\r\n\t\tneighbors += 1\r\n\t\t\r\n\tif isOutOfBounds(column -1, row):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column -1, row):\r\n\t\tneighbors += 1\r\n\t\t\r\n\tif isOutOfBounds(column +1, row):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column +1, row):\r\n\t\tneighbors += 1\r\n\t\t\r\n\tif isOutOfBounds(column -1, row +1):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column -1, row +1):\r\n\t\tneighbors += 1\r\n\t\t\r\n\tif isOutOfBounds(column, row +1):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column, row +1):\r\n\t\tneighbors += 1\r\n\t\t\r\n\tif isOutOfBounds(column +1, row +1):\r\n\t\tneighbors += 1\r\n\telif isWall(caveMap, column +1, row +1):\r\n\t\tneighbors += 1\r\n\r\n\treturn neighbors\r\n\t\r\ndef runGeneration (caveMap, generations):\r\n\t#smooth out random noise using modified 4-5 cellular automata rules\r\n\t#the entire process is pretty inefficient - it has to loop through the entire list as many as \r\n\t#(mapWidth * mapHeight * 8) times for potentially millions of comparisons\r\n\ti =0 \r\n\t\r\n\tfor i in range(0, generations):\r\n\t\tstart_time = time.time()\r\n\t\tfor cell in caveMap:\r\n\t\t\tif findNeighbors(caveMap,cell[0],cell[1]) < 3:\r\n\t\t\t\tcell[2] = 0\r\n\t\t\telif findNeighbors(caveMap, cell[0], cell[1]) > 5:\r\n\t\t\t\tcell[2] = 1\r\n\t\tprintCaveMap(caveMap)\r\n\t\tend_time = 
time.time()\r\n\t\tprint(end_time - start_time, \" seconds\")\r\n\t\r\n\treturn caveMap\r\n\r\n\t\r\ndef printCaveMap(caveMap):\r\n\t#print the map by displaying a grid of characters where # = walls and spaces = floors\r\n\t#just uses mapWidth to insert returns, very agnostic about the column/row of a cell\r\n\t\r\n\ti = 1\r\n\tfor item in caveMap:\r\n\t\tif i == mapWidth + 1:\r\n\t\t\tprint('\\r')\r\n\t\t\ti = 1\r\n\t\tif item[2] == 1:\r\n\t\t\tprint(\" # \", end=\"\")\r\n\t\telse:\r\n\t\t\tprint(\" \", end=\"\")\r\n\t\ti += 1\r\n\t\r\n\tprint(\"\\n\", \"\\n\")\r\n\t\r\ndef main():\r\n\t\t\r\n\tcaveMap = generateNoise()\r\n\trunGeneration(caveMap, 2)\r\n\t\t\r\nif __name__ == \"__main__\":\r\n\tmain()",
"step-ids": [
5,
6,
8,
9,
11
]
}
|
[
5,
6,
8,
9,
11
] |
class Solution(object):
def removeNthFromEnd(self, head, n):
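        # Two-pointer pass (assumes LeetCode's standard ListNode stub is
        # defined elsewhere): advance `first` n nodes ahead, then walk both
        # pointers until `first` hits the tail; `second` then sits just
        # before the node to remove.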
dummy = ListNode(-1)
dummy.next = head
first, second = dummy, dummy
for i in range(n):
first = first.next
while first.next:
first = first.next
second = second.next
second.next = second.next.next
return dummy.next
|
normal
|
{
"blob_id": "7e71c97070285b051b23448c755e3d41b2909dda",
"index": 3884,
"step-1": "<mask token>\n",
"step-2": "class Solution(object):\n <mask token>\n",
"step-3": "class Solution(object):\n\n def removeNthFromEnd(self, head, n):\n dummy = ListNode(-1)\n dummy.next = head\n first, second = dummy, dummy\n for i in range(n):\n first = first.next\n while first.next:\n first = first.next\n second = second.next\n second.next = second.next.next\n return dummy.next\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from __future__ import print_function
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
import sys
import json
import math
from klpmln import MVPP
dprogram = '''
img(i1). img(i2).
addition(A,B,N) :- digit(A,1,N1), digit(B,1,N2), N=N1+N2.
nn(m(X,1), digit, [0,1,2,3,4,5,6,7,8,9]) :- img(X).
'''
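# Note: dprogram documents the intended rules; the MVPP object in train()
# actually loads its program from programs/mnist.txt.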
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.encoder = nn.Sequential(
            nn.Conv2d(1, 6, 5), # 6 is the output channel size; 5 is the kernel size; 1 (channel) 28 28 -> 6 24 24
            nn.MaxPool2d(2, 2), # kernel size 2; stride size 2; 6 24 24 -> 6 12 12
nn.ReLU(True), # inplace=True means that it will modify the input directly thus save memory
nn.Conv2d(6, 16, 5), # 6 12 12 -> 16 8 8
nn.MaxPool2d(2, 2), # 16 8 8 -> 16 4 4
nn.ReLU(True)
)
self.classifier = nn.Sequential(
nn.Linear(16 * 4 * 4, 120),
nn.ReLU(),
nn.Linear(120, 84),
nn.ReLU(),
nn.Linear(84, 10),
nn.Softmax(1)
)
def forward(self, x):
x = self.encoder(x)
x = x.view(-1, 16 * 4 * 4)
x = self.classifier(x)
# return F.log_softmax(x, dim=1)
return x
def train(args, model, device, train_loader, optimizer, epoch):
model.train()
test = MVPP("programs/mnist.txt")
for batch_idx, (data, target) in enumerate(train_loader):
for inner_iter in range(1):
data, target = data.to(device), target.to(device)
# optimizer.zero_grad()
output = model(data)
# test = MVPP("programs/mnist.txt")
test.parameters = output.tolist()
test.normalize_probs()
# construct observation addition(i1, i2, sum)
value = sum(target.tolist())
            observation = ":- not addition(i1,i2," + str(value) + ")."
# we calculate gradients with exact computation
gradients = test.gradients_one_obs(observation)
if device.type == 'cuda':
grad_by_prob = -1 * torch.cuda.FloatTensor(gradients)
else:
grad_by_prob = -1 * torch.FloatTensor(gradients)
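            # `loss` below is computed only for the progress printout; the
            # actual training signal is the MVPP gradient pushed through
            # output.backward(grad_by_prob)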
loss = F.nll_loss(output, target)
output.backward(grad_by_prob, retain_graph=True)
if (batch_idx+1) % args.multiExampleNum == 0 and inner_iter == 0:
optimizer.step()
optimizer.zero_grad()
# optimizer.step()
if batch_idx % args.log_interval == 0 and inner_iter == 0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
print(observation)
print("Output: {}".format(output.data.tolist()))
print("Gradient: {}".format(grad_by_prob))
def test(args, model, device, test_loader):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
def main():
# Training settings
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=2, metavar='N',
help='input batch size for training (default: 2)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=1, metavar='N',
help='number of epochs to train (default: 1)')
parser.add_argument('--lr', type=float, default=0.001, metavar='LR',
                        help='learning rate (default: 0.001)')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
help='SGD momentum (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=1000, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--save-model', action='store_true', default=False,
help='For Saving the current Model')
parser.add_argument('--multiExampleNum', type=int, default=1, metavar='N',
                        help='input the number of examples whose gradients are accumulated before back-propagation (default: 1)')
args = parser.parse_args()
use_cuda = not args.no_cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
train_loader = torch.utils.data.DataLoader(
datasets.MNIST('../data', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.batch_size, shuffle=True, **kwargs)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('../data', train=False, transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=args.test_batch_size, shuffle=True, **kwargs)
model = Net().to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
# optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum)
for epoch in range(1, args.epochs + 1):
train(args, model, device, train_loader, optimizer, epoch)
test(args, model, device, test_loader)
    if args.save_model:
torch.save(model.state_dict(),"mnist_cnn.pt")
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "70b08b9e8c1510a9be48a4bc1de39c6c85b36eed",
"index": 2426,
"step-1": "<mask token>\n\n\nclass Net(nn.Module):\n\n def __init__(self):\n super(Net, self).__init__()\n self.encoder = nn.Sequential(nn.Conv2d(1, 6, 5), nn.MaxPool2d(2, 2),\n nn.ReLU(True), nn.Conv2d(6, 16, 5), nn.MaxPool2d(2, 2), nn.ReLU\n (True))\n self.classifier = nn.Sequential(nn.Linear(16 * 4 * 4, 120), nn.ReLU\n (), nn.Linear(120, 84), nn.ReLU(), nn.Linear(84, 10), nn.Softmax(1)\n )\n\n def forward(self, x):\n x = self.encoder(x)\n x = x.view(-1, 16 * 4 * 4)\n x = self.classifier(x)\n return x\n\n\ndef train(args, model, device, train_loader, optimizer, epoch):\n model.train()\n test = MVPP('programs/mnist.txt')\n for batch_idx, (data, target) in enumerate(train_loader):\n for inner_iter in range(1):\n data, target = data.to(device), target.to(device)\n output = model(data)\n test.parameters = output.tolist()\n test.normalize_probs()\n value = sum(target.tolist())\n observation = ':- not addition(i1,i2,' + str(value) + ').'\n gradients = test.gradients_one_obs(observation)\n if device.type == 'cuda':\n grad_by_prob = -1 * torch.cuda.FloatTensor(gradients)\n else:\n grad_by_prob = -1 * torch.FloatTensor(gradients)\n loss = F.nll_loss(output, target)\n output.backward(grad_by_prob, retain_graph=True)\n if (batch_idx + 1) % args.multiExampleNum == 0 and inner_iter == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % args.log_interval == 0 and inner_iter == 0:\n print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.\n format(epoch, batch_idx * len(data), len(train_loader.\n dataset), 100.0 * batch_idx / len(train_loader), loss.\n item()))\n print(observation)\n print('Output: {}'.format(output.data.tolist()))\n print('Gradient: {}'.format(grad_by_prob))\n\n\n<mask token>\n\n\ndef main():\n parser = argparse.ArgumentParser(description='PyTorch MNIST Example')\n parser.add_argument('--batch-size', type=int, default=2, metavar='N',\n help='input batch size for training (default: 2)')\n parser.add_argument('--test-batch-size', type=int, default=1000,\n metavar='N', help='input batch size for testing (default: 1000)')\n parser.add_argument('--epochs', type=int, default=1, metavar='N', help=\n 'number of epochs to train (default: 1)')\n parser.add_argument('--lr', type=float, default=0.001, metavar='LR',\n help='learning rate (default: 0.01)')\n parser.add_argument('--momentum', type=float, default=0.5, metavar='M',\n help='SGD momentum (default: 0.5)')\n parser.add_argument('--no-cuda', action='store_true', default=False,\n help='disables CUDA training')\n parser.add_argument('--seed', type=int, default=1, metavar='S', help=\n 'random seed (default: 1)')\n parser.add_argument('--log-interval', type=int, default=1000, metavar=\n 'N', help='how many batches to wait before logging training status')\n parser.add_argument('--save-model', action='store_true', default=False,\n help='For Saving the current Model')\n parser.add_argument('--multiExampleNum', type=int, default=1, metavar=\n 'N', help=\n 'input the number of examples whose gradients are accumulated before back-propogation (default: 10)'\n )\n args = parser.parse_args()\n use_cuda = not args.no_cuda and torch.cuda.is_available()\n torch.manual_seed(args.seed)\n device = torch.device('cuda' if use_cuda else 'cpu')\n kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}\n train_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=True, download=True, transform=transforms.Compose([transforms\n .ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])),\n batch_size=args.batch_size, 
shuffle=True, **kwargs)\n test_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=False, transform=transforms.Compose([transforms.ToTensor(),\n transforms.Normalize((0.1307,), (0.3081,))])), batch_size=args.\n test_batch_size, shuffle=True, **kwargs)\n model = Net().to(device)\n optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)\n for epoch in range(1, args.epochs + 1):\n train(args, model, device, train_loader, optimizer, epoch)\n test(args, model, device, test_loader)\n if args.save_model:\n torch.save(model.state_dict(), 'mnist_cnn.pt')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Net(nn.Module):\n\n def __init__(self):\n super(Net, self).__init__()\n self.encoder = nn.Sequential(nn.Conv2d(1, 6, 5), nn.MaxPool2d(2, 2),\n nn.ReLU(True), nn.Conv2d(6, 16, 5), nn.MaxPool2d(2, 2), nn.ReLU\n (True))\n self.classifier = nn.Sequential(nn.Linear(16 * 4 * 4, 120), nn.ReLU\n (), nn.Linear(120, 84), nn.ReLU(), nn.Linear(84, 10), nn.Softmax(1)\n )\n\n def forward(self, x):\n x = self.encoder(x)\n x = x.view(-1, 16 * 4 * 4)\n x = self.classifier(x)\n return x\n\n\ndef train(args, model, device, train_loader, optimizer, epoch):\n model.train()\n test = MVPP('programs/mnist.txt')\n for batch_idx, (data, target) in enumerate(train_loader):\n for inner_iter in range(1):\n data, target = data.to(device), target.to(device)\n output = model(data)\n test.parameters = output.tolist()\n test.normalize_probs()\n value = sum(target.tolist())\n observation = ':- not addition(i1,i2,' + str(value) + ').'\n gradients = test.gradients_one_obs(observation)\n if device.type == 'cuda':\n grad_by_prob = -1 * torch.cuda.FloatTensor(gradients)\n else:\n grad_by_prob = -1 * torch.FloatTensor(gradients)\n loss = F.nll_loss(output, target)\n output.backward(grad_by_prob, retain_graph=True)\n if (batch_idx + 1) % args.multiExampleNum == 0 and inner_iter == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % args.log_interval == 0 and inner_iter == 0:\n print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.\n format(epoch, batch_idx * len(data), len(train_loader.\n dataset), 100.0 * batch_idx / len(train_loader), loss.\n item()))\n print(observation)\n print('Output: {}'.format(output.data.tolist()))\n print('Gradient: {}'.format(grad_by_prob))\n\n\ndef test(args, model, device, test_loader):\n model.eval()\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for data, target in test_loader:\n data, target = data.to(device), target.to(device)\n output = model(data)\n test_loss += F.nll_loss(output, target, reduction='sum').item()\n pred = output.argmax(dim=1, keepdim=True)\n correct += pred.eq(target.view_as(pred)).sum().item()\n test_loss /= len(test_loader.dataset)\n print('\\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\\n'.\n format(test_loss, correct, len(test_loader.dataset), 100.0 *\n correct / len(test_loader.dataset)))\n\n\ndef main():\n parser = argparse.ArgumentParser(description='PyTorch MNIST Example')\n parser.add_argument('--batch-size', type=int, default=2, metavar='N',\n help='input batch size for training (default: 2)')\n parser.add_argument('--test-batch-size', type=int, default=1000,\n metavar='N', help='input batch size for testing (default: 1000)')\n parser.add_argument('--epochs', type=int, default=1, metavar='N', help=\n 'number of epochs to train (default: 1)')\n parser.add_argument('--lr', type=float, default=0.001, metavar='LR',\n help='learning rate (default: 0.01)')\n parser.add_argument('--momentum', type=float, default=0.5, metavar='M',\n help='SGD momentum (default: 0.5)')\n parser.add_argument('--no-cuda', action='store_true', default=False,\n help='disables CUDA training')\n parser.add_argument('--seed', type=int, default=1, metavar='S', help=\n 'random seed (default: 1)')\n parser.add_argument('--log-interval', type=int, default=1000, metavar=\n 'N', help='how many batches to wait before logging training status')\n parser.add_argument('--save-model', action='store_true', default=False,\n help='For Saving the current Model')\n parser.add_argument('--multiExampleNum', type=int, default=1, metavar=\n 'N', 
help=\n 'input the number of examples whose gradients are accumulated before back-propogation (default: 10)'\n )\n args = parser.parse_args()\n use_cuda = not args.no_cuda and torch.cuda.is_available()\n torch.manual_seed(args.seed)\n device = torch.device('cuda' if use_cuda else 'cpu')\n kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}\n train_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=True, download=True, transform=transforms.Compose([transforms\n .ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])),\n batch_size=args.batch_size, shuffle=True, **kwargs)\n test_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=False, transform=transforms.Compose([transforms.ToTensor(),\n transforms.Normalize((0.1307,), (0.3081,))])), batch_size=args.\n test_batch_size, shuffle=True, **kwargs)\n model = Net().to(device)\n optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)\n for epoch in range(1, args.epochs + 1):\n train(args, model, device, train_loader, optimizer, epoch)\n test(args, model, device, test_loader)\n if args.save_model:\n torch.save(model.state_dict(), 'mnist_cnn.pt')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Net(nn.Module):\n\n def __init__(self):\n super(Net, self).__init__()\n self.encoder = nn.Sequential(nn.Conv2d(1, 6, 5), nn.MaxPool2d(2, 2),\n nn.ReLU(True), nn.Conv2d(6, 16, 5), nn.MaxPool2d(2, 2), nn.ReLU\n (True))\n self.classifier = nn.Sequential(nn.Linear(16 * 4 * 4, 120), nn.ReLU\n (), nn.Linear(120, 84), nn.ReLU(), nn.Linear(84, 10), nn.Softmax(1)\n )\n\n def forward(self, x):\n x = self.encoder(x)\n x = x.view(-1, 16 * 4 * 4)\n x = self.classifier(x)\n return x\n\n\ndef train(args, model, device, train_loader, optimizer, epoch):\n model.train()\n test = MVPP('programs/mnist.txt')\n for batch_idx, (data, target) in enumerate(train_loader):\n for inner_iter in range(1):\n data, target = data.to(device), target.to(device)\n output = model(data)\n test.parameters = output.tolist()\n test.normalize_probs()\n value = sum(target.tolist())\n observation = ':- not addition(i1,i2,' + str(value) + ').'\n gradients = test.gradients_one_obs(observation)\n if device.type == 'cuda':\n grad_by_prob = -1 * torch.cuda.FloatTensor(gradients)\n else:\n grad_by_prob = -1 * torch.FloatTensor(gradients)\n loss = F.nll_loss(output, target)\n output.backward(grad_by_prob, retain_graph=True)\n if (batch_idx + 1) % args.multiExampleNum == 0 and inner_iter == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % args.log_interval == 0 and inner_iter == 0:\n print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.\n format(epoch, batch_idx * len(data), len(train_loader.\n dataset), 100.0 * batch_idx / len(train_loader), loss.\n item()))\n print(observation)\n print('Output: {}'.format(output.data.tolist()))\n print('Gradient: {}'.format(grad_by_prob))\n\n\ndef test(args, model, device, test_loader):\n model.eval()\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for data, target in test_loader:\n data, target = data.to(device), target.to(device)\n output = model(data)\n test_loss += F.nll_loss(output, target, reduction='sum').item()\n pred = output.argmax(dim=1, keepdim=True)\n correct += pred.eq(target.view_as(pred)).sum().item()\n test_loss /= len(test_loader.dataset)\n print('\\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\\n'.\n format(test_loss, correct, len(test_loader.dataset), 100.0 *\n correct / len(test_loader.dataset)))\n\n\ndef main():\n parser = argparse.ArgumentParser(description='PyTorch MNIST Example')\n parser.add_argument('--batch-size', type=int, default=2, metavar='N',\n help='input batch size for training (default: 2)')\n parser.add_argument('--test-batch-size', type=int, default=1000,\n metavar='N', help='input batch size for testing (default: 1000)')\n parser.add_argument('--epochs', type=int, default=1, metavar='N', help=\n 'number of epochs to train (default: 1)')\n parser.add_argument('--lr', type=float, default=0.001, metavar='LR',\n help='learning rate (default: 0.01)')\n parser.add_argument('--momentum', type=float, default=0.5, metavar='M',\n help='SGD momentum (default: 0.5)')\n parser.add_argument('--no-cuda', action='store_true', default=False,\n help='disables CUDA training')\n parser.add_argument('--seed', type=int, default=1, metavar='S', help=\n 'random seed (default: 1)')\n parser.add_argument('--log-interval', type=int, default=1000, metavar=\n 'N', help='how many batches to wait before logging training status')\n parser.add_argument('--save-model', action='store_true', default=False,\n help='For Saving the current Model')\n parser.add_argument('--multiExampleNum', type=int, default=1, metavar=\n 'N', 
help=\n 'input the number of examples whose gradients are accumulated before back-propogation (default: 10)'\n )\n args = parser.parse_args()\n use_cuda = not args.no_cuda and torch.cuda.is_available()\n torch.manual_seed(args.seed)\n device = torch.device('cuda' if use_cuda else 'cpu')\n kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}\n train_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=True, download=True, transform=transforms.Compose([transforms\n .ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])),\n batch_size=args.batch_size, shuffle=True, **kwargs)\n test_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=False, transform=transforms.Compose([transforms.ToTensor(),\n transforms.Normalize((0.1307,), (0.3081,))])), batch_size=args.\n test_batch_size, shuffle=True, **kwargs)\n model = Net().to(device)\n optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)\n for epoch in range(1, args.epochs + 1):\n train(args, model, device, train_loader, optimizer, epoch)\n test(args, model, device, test_loader)\n if args.save_model:\n torch.save(model.state_dict(), 'mnist_cnn.pt')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\ndprogram = \"\"\"\nimg(i1). img(i2).\n\naddition(A,B,N) :- digit(A,1,N1), digit(B,1,N2), N=N1+N2.\n\nnn(m(X,1), digit, [0,1,2,3,4,5,6,7,8,9]) :- img(X).\n\"\"\"\n\n\nclass Net(nn.Module):\n\n def __init__(self):\n super(Net, self).__init__()\n self.encoder = nn.Sequential(nn.Conv2d(1, 6, 5), nn.MaxPool2d(2, 2),\n nn.ReLU(True), nn.Conv2d(6, 16, 5), nn.MaxPool2d(2, 2), nn.ReLU\n (True))\n self.classifier = nn.Sequential(nn.Linear(16 * 4 * 4, 120), nn.ReLU\n (), nn.Linear(120, 84), nn.ReLU(), nn.Linear(84, 10), nn.Softmax(1)\n )\n\n def forward(self, x):\n x = self.encoder(x)\n x = x.view(-1, 16 * 4 * 4)\n x = self.classifier(x)\n return x\n\n\ndef train(args, model, device, train_loader, optimizer, epoch):\n model.train()\n test = MVPP('programs/mnist.txt')\n for batch_idx, (data, target) in enumerate(train_loader):\n for inner_iter in range(1):\n data, target = data.to(device), target.to(device)\n output = model(data)\n test.parameters = output.tolist()\n test.normalize_probs()\n value = sum(target.tolist())\n observation = ':- not addition(i1,i2,' + str(value) + ').'\n gradients = test.gradients_one_obs(observation)\n if device.type == 'cuda':\n grad_by_prob = -1 * torch.cuda.FloatTensor(gradients)\n else:\n grad_by_prob = -1 * torch.FloatTensor(gradients)\n loss = F.nll_loss(output, target)\n output.backward(grad_by_prob, retain_graph=True)\n if (batch_idx + 1) % args.multiExampleNum == 0 and inner_iter == 0:\n optimizer.step()\n optimizer.zero_grad()\n if batch_idx % args.log_interval == 0 and inner_iter == 0:\n print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.\n format(epoch, batch_idx * len(data), len(train_loader.\n dataset), 100.0 * batch_idx / len(train_loader), loss.\n item()))\n print(observation)\n print('Output: {}'.format(output.data.tolist()))\n print('Gradient: {}'.format(grad_by_prob))\n\n\ndef test(args, model, device, test_loader):\n model.eval()\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for data, target in test_loader:\n data, target = data.to(device), target.to(device)\n output = model(data)\n test_loss += F.nll_loss(output, target, reduction='sum').item()\n pred = output.argmax(dim=1, keepdim=True)\n correct += pred.eq(target.view_as(pred)).sum().item()\n test_loss /= len(test_loader.dataset)\n print('\\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\\n'.\n format(test_loss, correct, len(test_loader.dataset), 100.0 *\n correct / len(test_loader.dataset)))\n\n\ndef main():\n parser = argparse.ArgumentParser(description='PyTorch MNIST Example')\n parser.add_argument('--batch-size', type=int, default=2, metavar='N',\n help='input batch size for training (default: 2)')\n parser.add_argument('--test-batch-size', type=int, default=1000,\n metavar='N', help='input batch size for testing (default: 1000)')\n parser.add_argument('--epochs', type=int, default=1, metavar='N', help=\n 'number of epochs to train (default: 1)')\n parser.add_argument('--lr', type=float, default=0.001, metavar='LR',\n help='learning rate (default: 0.01)')\n parser.add_argument('--momentum', type=float, default=0.5, metavar='M',\n help='SGD momentum (default: 0.5)')\n parser.add_argument('--no-cuda', action='store_true', default=False,\n help='disables CUDA training')\n parser.add_argument('--seed', type=int, default=1, metavar='S', help=\n 'random seed (default: 1)')\n parser.add_argument('--log-interval', type=int, default=1000, metavar=\n 'N', help='how many batches to wait before logging training status')\n parser.add_argument('--save-model', 
action='store_true', default=False,\n help='For Saving the current Model')\n parser.add_argument('--multiExampleNum', type=int, default=1, metavar=\n 'N', help=\n 'input the number of examples whose gradients are accumulated before back-propogation (default: 10)'\n )\n args = parser.parse_args()\n use_cuda = not args.no_cuda and torch.cuda.is_available()\n torch.manual_seed(args.seed)\n device = torch.device('cuda' if use_cuda else 'cpu')\n kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}\n train_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=True, download=True, transform=transforms.Compose([transforms\n .ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])),\n batch_size=args.batch_size, shuffle=True, **kwargs)\n test_loader = torch.utils.data.DataLoader(datasets.MNIST('../data',\n train=False, transform=transforms.Compose([transforms.ToTensor(),\n transforms.Normalize((0.1307,), (0.3081,))])), batch_size=args.\n test_batch_size, shuffle=True, **kwargs)\n model = Net().to(device)\n optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)\n for epoch in range(1, args.epochs + 1):\n train(args, model, device, train_loader, optimizer, epoch)\n test(args, model, device, test_loader)\n if args.save_model:\n torch.save(model.state_dict(), 'mnist_cnn.pt')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "from __future__ import print_function\nimport argparse\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom torchvision import datasets, transforms\n\nimport sys\nimport json\nimport math\n\nfrom klpmln import MVPP\n\ndprogram = '''\nimg(i1). img(i2).\n\naddition(A,B,N) :- digit(A,1,N1), digit(B,1,N2), N=N1+N2.\n\nnn(m(X,1), digit, [0,1,2,3,4,5,6,7,8,9]) :- img(X).\n'''\n\nclass Net(nn.Module):\n def __init__(self):\n super(Net, self).__init__()\n self.encoder = nn.Sequential(\n nn.Conv2d(1, 6, 5), # 6 is the output chanel size; 5 is the kernal size; 1 (chanel) 28 28 -> 6 24 24\n nn.MaxPool2d(2, 2), # kernal size 2; stride size 2; 6 24 24 -> 6 12 12\n nn.ReLU(True), # inplace=True means that it will modify the input directly thus save memory\n nn.Conv2d(6, 16, 5), # 6 12 12 -> 16 8 8\n nn.MaxPool2d(2, 2), # 16 8 8 -> 16 4 4\n nn.ReLU(True) \n )\n self.classifier = nn.Sequential(\n nn.Linear(16 * 4 * 4, 120),\n nn.ReLU(),\n nn.Linear(120, 84),\n nn.ReLU(),\n nn.Linear(84, 10),\n nn.Softmax(1)\n )\n\n def forward(self, x):\n x = self.encoder(x)\n x = x.view(-1, 16 * 4 * 4)\n x = self.classifier(x)\n # return F.log_softmax(x, dim=1)\n return x\n\n\n \ndef train(args, model, device, train_loader, optimizer, epoch):\n model.train()\n test = MVPP(\"programs/mnist.txt\")\n for batch_idx, (data, target) in enumerate(train_loader):\n for inner_iter in range(1):\n data, target = data.to(device), target.to(device)\n # optimizer.zero_grad()\n output = model(data)\n\n # test = MVPP(\"programs/mnist.txt\")\n test.parameters = output.tolist()\n test.normalize_probs()\n\n # construct observation addition(i1, i2, sum)\n value = sum(target.tolist())\n observation = \":- not addition(i1,i2,\"+ str(value) + \").\"\n\n # we calculate gradients with exact computation\n gradients = test.gradients_one_obs(observation)\n\n if device.type == 'cuda':\n grad_by_prob = -1 * torch.cuda.FloatTensor(gradients)\n else:\n grad_by_prob = -1 * torch.FloatTensor(gradients)\n\n loss = F.nll_loss(output, target)\n\n output.backward(grad_by_prob, retain_graph=True)\n if (batch_idx+1) % args.multiExampleNum == 0 and inner_iter == 0:\n optimizer.step()\n optimizer.zero_grad()\n # optimizer.step()\n if batch_idx % args.log_interval == 0 and inner_iter == 0:\n print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.format(\n epoch, batch_idx * len(data), len(train_loader.dataset),\n 100. * batch_idx / len(train_loader), loss.item()))\n print(observation)\n print(\"Output: {}\".format(output.data.tolist()))\n print(\"Gradient: {}\".format(grad_by_prob))\n\ndef test(args, model, device, test_loader):\n model.eval()\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for data, target in test_loader:\n data, target = data.to(device), target.to(device)\n output = model(data)\n test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss\n pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability\n correct += pred.eq(target.view_as(pred)).sum().item()\n\n test_loss /= len(test_loader.dataset)\n\n print('\\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\\n'.format(\n test_loss, correct, len(test_loader.dataset),\n 100. 
* correct / len(test_loader.dataset)))\n\ndef main():\n # Training settings\n parser = argparse.ArgumentParser(description='PyTorch MNIST Example')\n parser.add_argument('--batch-size', type=int, default=2, metavar='N',\n help='input batch size for training (default: 2)')\n parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',\n help='input batch size for testing (default: 1000)')\n parser.add_argument('--epochs', type=int, default=1, metavar='N',\n help='number of epochs to train (default: 1)')\n parser.add_argument('--lr', type=float, default=0.001, metavar='LR',\n help='learning rate (default: 0.01)')\n parser.add_argument('--momentum', type=float, default=0.5, metavar='M',\n help='SGD momentum (default: 0.5)')\n parser.add_argument('--no-cuda', action='store_true', default=False,\n help='disables CUDA training')\n parser.add_argument('--seed', type=int, default=1, metavar='S',\n help='random seed (default: 1)')\n parser.add_argument('--log-interval', type=int, default=1000, metavar='N',\n help='how many batches to wait before logging training status')\n \n parser.add_argument('--save-model', action='store_true', default=False,\n help='For Saving the current Model')\n\n parser.add_argument('--multiExampleNum', type=int, default=1, metavar='N',\n help='input the number of examples whose gradients are accumulated before back-propogation (default: 10)')\n args = parser.parse_args()\n use_cuda = not args.no_cuda and torch.cuda.is_available()\n\n torch.manual_seed(args.seed)\n\n device = torch.device(\"cuda\" if use_cuda else \"cpu\")\n\n kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}\n train_loader = torch.utils.data.DataLoader(\n datasets.MNIST('../data', train=True, download=True,\n transform=transforms.Compose([\n transforms.ToTensor(),\n transforms.Normalize((0.1307,), (0.3081,))\n ])),\n batch_size=args.batch_size, shuffle=True, **kwargs)\n test_loader = torch.utils.data.DataLoader(\n datasets.MNIST('../data', train=False, transform=transforms.Compose([\n transforms.ToTensor(),\n transforms.Normalize((0.1307,), (0.3081,))\n ])),\n batch_size=args.test_batch_size, shuffle=True, **kwargs)\n\n\n model = Net().to(device)\n optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)\n # optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum)\n\n \n\n\n\n for epoch in range(1, args.epochs + 1):\n train(args, model, device, train_loader, optimizer, epoch)\n test(args, model, device, test_loader)\n\n if (args.save_model):\n torch.save(model.state_dict(),\"mnist_cnn.pt\")\n \nif __name__ == '__main__':\n main()\n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
class State:
def __init__(self, id):
self.id = id
def NotinClosed(problem, node): # returns 1 if the state has not already been visited (allowing for the depth checks), so the node should be added
NotVisited = 1
for tuple in problem.closed:
if node.state.id == tuple[0].id and node.depth >= tuple[1]:
            NotVisited = 0 # already present among the visited states, but selected_node has greater/equal depth
return NotVisited
|
normal
|
{
"blob_id": "200deda300e39b07e0e558277a340b7ad01c7dee",
"index": 2216,
"step-1": "<mask token>\n",
"step-2": "class State:\n <mask token>\n\n\n<mask token>\n",
"step-3": "class State:\n\n def __init__(self, id):\n self.id = id\n\n\n<mask token>\n",
"step-4": "class State:\n\n def __init__(self, id):\n self.id = id\n\n\ndef NotinClosed(problem, node):\n NotVisited = 1\n for tuple in problem.closed:\n if node.state.id == tuple[0].id and node.depth >= tuple[1]:\n NotVisited = 0\n return NotVisited\n",
"step-5": "\nclass State:\n def __init__(self, id):\n self.id = id\n\n\ndef NotinClosed(problem, node): #restituisce 1 se lo stato non è stato già visitato (al netto di controlli sulla depth) è quindi bisogna aggiungerlo\n NotVisited = 1\n for tuple in problem.closed:\n if node.state.id == tuple[0].id and node.depth >= tuple[1]:\n NotVisited = 0 #presente nei visited ma selected_node ha maggiore/uguale depth\n return NotVisited",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import thinkbayes2 as thinkbayes
from thinkbayes2 import Pmf
import thinkplot
class Dice2(Pmf):
def __init__(self, sides):
Pmf.__init__(self)
for x in range(1, sides + 1):
self.Set(x, 1)
self.Normalize()
if __name__ == "__main__":
d6 = Dice2(6)
dices = [d6] * 6
three = thinkbayes.SampleSum(dices, 1000)
thinkplot.Pmf(three)
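    # render the sampled distribution: thinkplot.Pmf only queues the plot,
    # so a Show() call (standard thinkplot API) is typically needed when
    # running this as a script
    thinkplot.Show()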
|
normal
|
{
"blob_id": "236dd70dec8d53062d6c38c370cb8f11dc5ef9d0",
"index": 556,
"step-1": "<mask token>\n\n\nclass Dice2(Pmf):\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Dice2(Pmf):\n\n def __init__(self, sides):\n Pmf.__init__(self)\n for x in range(1, sides + 1):\n self.Set(x, 1)\n self.Normalize()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Dice2(Pmf):\n\n def __init__(self, sides):\n Pmf.__init__(self)\n for x in range(1, sides + 1):\n self.Set(x, 1)\n self.Normalize()\n\n\nif __name__ == '__main__':\n d6 = Dice2(6)\n dices = [d6] * 6\n three = thinkbayes.SampleSum(dices, 1000)\n thinkplot.Pmf(three)\n",
"step-4": "import thinkbayes2 as thinkbayes\nfrom thinkbayes2 import Pmf\nimport thinkplot\n\n\nclass Dice2(Pmf):\n\n def __init__(self, sides):\n Pmf.__init__(self)\n for x in range(1, sides + 1):\n self.Set(x, 1)\n self.Normalize()\n\n\nif __name__ == '__main__':\n d6 = Dice2(6)\n dices = [d6] * 6\n three = thinkbayes.SampleSum(dices, 1000)\n thinkplot.Pmf(three)\n",
"step-5": "import thinkbayes2 as thinkbayes\nfrom thinkbayes2 import Pmf\nimport thinkplot\n\n\nclass Dice2(Pmf):\n def __init__(self, sides):\n Pmf.__init__(self)\n for x in range(1, sides + 1):\n self.Set(x, 1)\n self.Normalize()\n\n\nif __name__ == \"__main__\":\n d6 = Dice2(6)\n dices = [d6] * 6\n three = thinkbayes.SampleSum(dices, 1000)\n thinkplot.Pmf(three)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/env python
###############################################################################
# \file
#
# $Id:$
#
# Copyright (C) Brno University of Technology
#
# This file is part of software developed by Robo@FIT group.
#
# Author: Tomas Lokaj
# Supervised by: Michal Spanel ([email protected])
# Date: 12/09/2012
#
# This file is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this file. If not, see <http://www.gnu.org/licenses/>.
#
import roslib; roslib.load_manifest('srs_interaction_primitives')
import rospy
import actionlib
from std_msgs.msg import *
from visualization_msgs.msg import *
from geometry_msgs.msg import *
from srs_interaction_primitives.msg import *
import random
import time
from srs_interaction_primitives.srv import ClickablePositions
if __name__ == '__main__':
rospy.init_node('clickable_positions_action_client', anonymous=True)
#===========================================================================
# rospy.wait_for_service('interaction_primitives/clickable_positions')
# click_positions = rospy.ServiceProxy('interaction_primitives/clickable_positions', ClickablePositions)
#
# color = ColorRGBA()
# color.r = random.uniform(0, 1)
# color.g = random.uniform(0, 1)
# color.b = random.uniform(0, 1)
# color.a = 1;
#
# radius = random.uniform(0, 1)
#
# positions = []
# for i in range(0, random.randint(2, 10)):
# positions.append(Point(random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0)))
#
# frame_id = "/world"
#
# topic = str(random.randint(0, 10000))
#
# resp = click_positions(frame_id, topic, radius, color, positions)
#
#===========================================================================
client = actionlib.SimpleActionClient("clickable_positions_server", ClickablePositionsAction)
client.wait_for_server()
rospy.loginfo("Server ready")
goal = ClickablePositionsGoal()
color = ColorRGBA()
color.r = random.uniform(0, 1)
color.g = random.uniform(0, 1)
color.b = random.uniform(0, 1)
    color.a = 1
goal.topic_suffix = str(random.randint(0, 10000))
goal.color = color
goal.radius = random.uniform(0, 1)
for i in range(0, random.randint(2, 10)):
goal.positions.append(Point(random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0)))
goal.frame_id = "/world"
# Fill in the goal here
client.send_goal(goal)
client.wait_for_result(rospy.Duration.from_sec(50.0))
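    # Note: in actionlib_msgs/GoalStatus, SUCCEEDED == 3, which is what the
    # check below tests for.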
if client.get_state() == 3:
rospy.loginfo("Goal completed:")
print client.get_result()
else:
rospy.logwarn("Action was preempted")
|
normal
|
{
"blob_id": "3bf1b4cfce55820605653d9dc57bab839f2dea55",
"index": 5864,
"step-1": "#!/usr/bin/env python\n###############################################################################\n# \\file\n#\n# $Id:$\n#\n# Copyright (C) Brno University of Technology\n#\n# This file is part of software developed by Robo@FIT group.\n# \n# Author: Tomas Lokaj\n# Supervised by: Michal Spanel ([email protected])\n# Date: 12/09/2012\n#\n# This file is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n# \n# This file is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n# \n# You should have received a copy of the GNU Lesser General Public License\n# along with this file. If not, see <http://www.gnu.org/licenses/>.\n#\n\nimport roslib; roslib.load_manifest('srs_interaction_primitives')\nimport rospy\nimport actionlib\nfrom std_msgs.msg import *\nfrom visualization_msgs.msg import *\nfrom geometry_msgs.msg import *\nfrom srs_interaction_primitives.msg import *\nimport random\nimport time\n\nfrom srs_interaction_primitives.srv import ClickablePositions \n\n\nif __name__ == '__main__':\n rospy.init_node('clickable_positions_action_client', anonymous=True)\n \n #===========================================================================\n # rospy.wait_for_service('interaction_primitives/clickable_positions')\n # click_positions = rospy.ServiceProxy('interaction_primitives/clickable_positions', ClickablePositions)\n # \n # color = ColorRGBA()\n # color.r = random.uniform(0, 1)\n # color.g = random.uniform(0, 1)\n # color.b = random.uniform(0, 1)\n # color.a = 1; \n # \n # radius = random.uniform(0, 1)\n # \n # positions = []\n # for i in range(0, random.randint(2, 10)):\n # positions.append(Point(random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0)))\n # \n # frame_id = \"/world\"\n # \n # topic = str(random.randint(0, 10000))\n # \n # resp = click_positions(frame_id, topic, radius, color, positions)\n # \n #===========================================================================\n \n client = actionlib.SimpleActionClient(\"clickable_positions_server\", ClickablePositionsAction)\n client.wait_for_server()\n rospy.loginfo(\"Server ready\")\n\n goal = ClickablePositionsGoal()\n color = ColorRGBA()\n color.r = random.uniform(0, 1)\n color.g = random.uniform(0, 1)\n color.b = random.uniform(0, 1)\n color.a = 1; \n goal.topic_suffix = str(random.randint(0, 10000))\n goal.color = color\n goal.radius = random.uniform(0, 1)\n for i in range(0, random.randint(2, 10)):\n goal.positions.append(Point(random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0), random.uniform(-10.0, 10.0)))\n goal.frame_id = \"/world\"\n # Fill in the goal here\n client.send_goal(goal)\n client.wait_for_result(rospy.Duration.from_sec(50.0))\n if client.get_state() == 3:\n rospy.loginfo(\"Goal completed:\")\n print client.get_result()\n else:\n rospy.logwarn(\"Action was preempted\")\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def swapPairs(self, head: ListNode) -> ListNode:
        dummy_head = ListNode(0)
        dummy_head.next = head
        pre = dummy_head
        cur = head
        while cur and cur.next:
            next = cur.next
            next_next = next.next
            pre.next = next
            next.next = cur
            cur.next = next_next
            pre = cur
            cur = next_next
return dummy_head.next
# !!!!!!!!!!!!!! Linked-list reversal pattern:
# use a dummy head node;
# define pre and cur outside the loop and compute next / next_next inside it (when needed),
# so that "cur and cur.next" can serve directly as the while condition
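# Worked trace (illustrative): swapPairs on 1->2->3->4
#   start:  dummy->1->2->3->4, pre=dummy, cur=1
#   iter 1: pre.next=2, 2.next=1, 1.next=3  => dummy->2->1->3->4; pre=1, cur=3
#   iter 2: 1.next=4, 4.next=3, 3.next=None => dummy->2->1->4->3; pre=3, cur=None
#   cur is None, so the loop ends; return dummy_head.next, i.e. 2->1->4->3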
|
normal
|
{
"blob_id": "4afc2ceed860c20af071e1d9ccaca17973cb9a8e",
"index": 7553,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def swapPairs(self, head: ListNode) ->ListNode:\n dummy_head = ListNode(0)\n dummy_head.next = head\n pre = dummy_head\n cur = head\n while cur and cur.next:\n next = cur.next\n next_next = next.next\n pre.next = next\n next.next = cur\n cur.next = next_next\n pre = cur\n cur = next_next\n return dummy_head.next\n",
"step-4": "# Definition for singly-linked list.\n# class ListNode:\n# def __init__(self, val=0, next=None):\n# self.val = val\n# self.next = next\nclass Solution:\n def swapPairs(self, head: ListNode) -> ListNode:\n dummy_head=ListNode(0)\n dummy_head.next=head\n pre=dummy_head\n cur=head\n while cur and cur.next:\n next=cur.next\n next_next=next.next\n pre.next=next\n next.next=cur\n cur.next=next_next\n pre=cur\n cur=next_next\n return dummy_head.next\n# !!!!!!!!!!!!!!反转链表套路: \n# 虚拟头结点\n# 在循环外定义pre cur,在循环内求next和next_next(如果有需要),这样就可以cur and cur.next作为判断while条件\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
subworkflow data:
workdir:
"../../data/SlideSeq/Puck_180819_10"
include: "../Snakefile"
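# Hedged usage sketch: rules defined after this point can depend on files
# produced by the "data" subworkflow by wrapping their paths with data(...).
# The rule and file names here are hypothetical, not taken from the project:
#
# rule example:
#     input:
#         data("barcode_matching/matched_barcodes.txt")
#     output:
#         "results/summary.txt"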
|
normal
|
{
"blob_id": "9847a9cd360649819f51abfe584fb51a81306f68",
"index": 4224,
"step-1": "subworkflow data:\n workdir:\n \"../../data/SlideSeq/Puck_180819_10\"\n\ninclude: \"../Snakefile\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# #----------------------------------------#
# 3.4
#
# Question:
#	Write a program which uses map() to make a list whose elements are the squares of the elements in [1,2,3,4,5,6,7,8,9,10].
#
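#	One possible answer (a minimal, self-contained sketch):
#
#	    squares = list(map(lambda x: x ** 2, range(1, 11)))
#	    print(squares)   # [1, 4, 9, 16, 25, 36, 49, 64, 81, 100]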
|
normal
|
{
"blob_id": "8c71bc5d53bf5c4cb20784659eddf8a97efb86ef",
"index": 8336,
"step-1": "#\t#----------------------------------------#\n#\t3.4\n#\t\n#\tQuestion:\n#\tWrite a program which can map() to make a list whose elements are square of elements in [1,2,3,4,5,6,7,8,9,10].\n#\t\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
1
]
}
|
[
1
] |
"""
Like Places but possibly script based and temporary.
Like a whisper command where it keeps track of participants.
"""
|
normal
|
{
"blob_id": "378c07c512425cb6ac6c998eaaa86892b02a37b8",
"index": 6905,
"step-1": "<mask token>\n",
"step-2": "\"\"\"\nLike Places but possibly script based and temporary.\nLike a whisper command where is keeps tracks of participants.\n\"\"\"",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
def play():
print("playing tank games...")
print("runing tank now!!!")
|
normal
|
{
"blob_id": "8c7fe90972feec19e280d3bccd39391af666608a",
"index": 9410,
"step-1": "<mask token>\n",
"step-2": "def play():\n print('playing tank games...')\n\n\n<mask token>\n",
"step-3": "def play():\n print('playing tank games...')\n\n\nprint('runing tank now!!!')\n",
"step-4": "def play():\n print(\"playing tank games...\")\nprint(\"runing tank now!!!\")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from practice.demo4 import paixu
if __name__ == '__main__':
n=int(input("请输入最大的数字范围:"))
paixu(n)
|
normal
|
{
"blob_id": "a777c6d76ef2ae15544a91bcfba0dbeabce0470a",
"index": 5377,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n n = int(input('请输入最大的数字范围:'))\n paixu(n)\n",
"step-3": "from practice.demo4 import paixu\nif __name__ == '__main__':\n n = int(input('请输入最大的数字范围:'))\n paixu(n)\n",
"step-4": "from practice.demo4 import paixu\nif __name__ == '__main__':\n n=int(input(\"请输入最大的数字范围:\"))\n paixu(n)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/local/bin/python3.3
'''
http://projecteuler.net/problem=127
abc-hits
Problem 127
The radical of n, rad(n), is the product of distinct prime factors of n. For example, 504 = 2^3 × 3^2 × 7, so rad(504) = 2 × 3 × 7 = 42.
We shall define the triplet of positive integers (a, b, c) to be an abc-hit if:
GCD(a, b) = GCD(a, c) = GCD(b, c) = 1
a < b
a + b = c
rad(abc) < c
For example, (5, 27, 32) is an abc-hit, because:
GCD(5, 27) = GCD(5, 32) = GCD(27, 32) = 1
5 < 27
5 + 27 = 32
rad(4320) = 30 < 32
It turns out that abc-hits are quite rare and there are only thirty-one abc-hits for c < 1000, with ∑c = 12523.
Find ∑c for c < 120000.
'''
'''
Notes on problem 127:
Very slow
'''
from PE_factors import genFactors
from PE_basic import product
def problem127():
GOAL = 120000
    rad = {}  # rad[n] = (set of distinct prime factors of n, radical of n), e.g. rad[6] = ({2, 3}, 6)
for primes in genFactors(GOAL):
rad[product(primes)] = (set(primes), product(set(primes)))
def relprime(s, t):
return s & t == set()
found = 0
total = 0
for b in range(1, GOAL):
for a in range(1, min(b, GOAL - b)):
c = a + b
x, y, z = rad[a], rad[b], rad[c]
if x[0] & y[0] != set():
continue
if x[1] * y[1] * z[1] < c:
found += 1
total += c
return total
if __name__ == "__main__":
print(problem127() == 18407904)
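    # Hedged sanity check (sketch): setting GOAL = 1000 inside problem127()
    # should reproduce the statement above -- thirty-one abc-hits with
    # sum(c) == 12523, including (5, 27, 32) since rad(5*27*32) = 30 < 32.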
|
normal
|
{
"blob_id": "646f6a0afc3dc129250c26270dda4355b8cea080",
"index": 1003,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef problem127():\n GOAL = 120000\n rad = {}\n for primes in genFactors(GOAL):\n rad[product(primes)] = set(primes), product(set(primes))\n\n def relprime(s, t):\n return s & t == set()\n found = 0\n total = 0\n for b in range(1, GOAL):\n for a in range(1, min(b, GOAL - b)):\n c = a + b\n x, y, z = rad[a], rad[b], rad[c]\n if x[0] & y[0] != set():\n continue\n if x[1] * y[1] * z[1] < c:\n found += 1\n total += c\n return total\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef problem127():\n GOAL = 120000\n rad = {}\n for primes in genFactors(GOAL):\n rad[product(primes)] = set(primes), product(set(primes))\n\n def relprime(s, t):\n return s & t == set()\n found = 0\n total = 0\n for b in range(1, GOAL):\n for a in range(1, min(b, GOAL - b)):\n c = a + b\n x, y, z = rad[a], rad[b], rad[c]\n if x[0] & y[0] != set():\n continue\n if x[1] * y[1] * z[1] < c:\n found += 1\n total += c\n return total\n\n\nif __name__ == '__main__':\n print(problem127() == 18407904)\n",
"step-4": "<mask token>\nfrom PE_factors import genFactors\nfrom PE_basic import product\n\n\ndef problem127():\n GOAL = 120000\n rad = {}\n for primes in genFactors(GOAL):\n rad[product(primes)] = set(primes), product(set(primes))\n\n def relprime(s, t):\n return s & t == set()\n found = 0\n total = 0\n for b in range(1, GOAL):\n for a in range(1, min(b, GOAL - b)):\n c = a + b\n x, y, z = rad[a], rad[b], rad[c]\n if x[0] & y[0] != set():\n continue\n if x[1] * y[1] * z[1] < c:\n found += 1\n total += c\n return total\n\n\nif __name__ == '__main__':\n print(problem127() == 18407904)\n",
"step-5": "#!/usr/local/bin/python3.3\n\n'''\nhttp://projecteuler.net/problem=127()\nabc-hits\nProblem 127\nThe radical of n, rad(n), is the product of distinct prime factors of n. For example, 504 = 23 × 32 × 7, so rad(504) = 2 × 3 × 7 = 42.\n\nWe shall define the triplet of positive integers (a, b, c) to be an abc-hit if:\n\nGCD(a, b) = GCD(a, c) = GCD(b, c) = 1\na < b\na + b = c\nrad(abc) < c\nFor example, (5, 27, 32) is an abc-hit, because:\n\nGCD(5, 27) = GCD(5, 32) = GCD(27, 32) = 1\n5 < 27\n5 + 27 = 32\nrad(4320) = 30 < 32\nIt turns out that abc-hits are quite rare and there are only thirty-one abc-hits for c < 1000, with ∑c = 12523.\n\nFind ∑c for c < 120000.\n'''\n\n'''\nNotes on problem 127():\nVery slow\n'''\n\nfrom PE_factors import genFactors\nfrom PE_basic import product\n\ndef problem127():\n GOAL = 120000\n\n rad = {} # rad[6] = {2,3}, radn[8] = {2}\n for primes in genFactors(GOAL):\n rad[product(primes)] = (set(primes), product(set(primes)))\n\n def relprime(s, t):\n return s & t == set()\n\n found = 0\n total = 0\n for b in range(1, GOAL):\n for a in range(1, min(b, GOAL - b)):\n c = a + b\n x, y, z = rad[a], rad[b], rad[c]\n if x[0] & y[0] != set():\n continue\n if x[1] * y[1] * z[1] < c:\n found += 1\n total += c\n return total\n\n\nif __name__ == \"__main__\":\n print(problem127() == 18407904)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import codecs
import time
import json
import os
class OitYitikuscrapyDataPipeline(object):
    def open_spider(self, spider):
        path = 'D:\\xiti10001\\data\\{}\\'.format(time.strftime("%Y%m%d", time.localtime()))
        # path = 'd:\\OITData\\zujuan\\{0}\\{1}\\'.format(time.strftime("%Y%m%d", time.localtime()), spider.name)
        if not os.path.exists(path):
            os.makedirs(path)
        self.file = codecs.open(path + spider.name + '.json', 'a', encoding='utf-8')
def process_item(self, item, spider):
        print('process info:', spider.name)
lines = json.dumps(dict(item), ensure_ascii=False) + '\n'
self.file.write(lines)
return item
def close_spider(self, spider):
self.file.close()
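# Hedged registration sketch: Scrapy only runs this pipeline if it is listed
# in settings.py; the dotted path below assumes a project/module layout that
# is not shown in the original code:
#
# ITEM_PIPELINES = {
#     'myproject.pipelines.OitYitikuscrapyDataPipeline': 300,
# }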
|
normal
|
{
"blob_id": "315996a783d7b95fd87374a8fe2602a572de071e",
"index": 3495,
"step-1": "<mask token>\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n\n def process_item(self, item, spider):\n print('进程打印信息:', spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n\n def process_item(self, item, spider):\n print('进程打印信息:', spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n\n def close_spider(self, spider):\n self.file.close()\n",
"step-4": "import codecs\nimport time\nimport json\nimport os\n\n\nclass OitYitikuscrapyDataPipeline(object):\n\n def open_spider(self, spider):\n path = 'D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime('%Y%m%d',\n time.localtime()))\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name + '.json', 'a', encoding\n ='utf-8')\n\n def process_item(self, item, spider):\n print('进程打印信息:', spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n\n def close_spider(self, spider):\n self.file.close()\n",
"step-5": "# -*- coding: utf-8 -*-\n\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html\nimport codecs\nimport time\nimport json\nimport os\n\nclass OitYitikuscrapyDataPipeline(object):\n def open_spider(self, spider):\n path ='D:\\\\xiti10001\\\\data\\\\{}\\\\'.format(time.strftime(\"%Y%m%d\",time.localtime()))\n # path = 'd:\\\\OITData\\\\zujuan\\\\{0}\\\\{1}\\\\'.format(time.strftime(\"%Y%m%d\", time.localtime()), spider.name)\n isExists = os.path.exists(path)\n if isExists:\n pass\n else:\n os.makedirs(path)\n self.file = codecs.open(path + spider.name+'.json', 'a', encoding='utf-8')\n def process_item(self, item, spider):\n print('进程打印信息:',spider.name)\n lines = json.dumps(dict(item), ensure_ascii=False) + '\\n'\n self.file.write(lines)\n return item\n\n def close_spider(self, spider):\n self.file.close()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from __future__ import print_function
import math
import db
from db import writer
from enum import Enum
from Definitions.Graph import Task
class Constraint(Enum):
deadline = 1
budget = 2
none = 3
def f_range(x, y, jump):
while x < y:
yield x
x += jump
class TaskSchedule:
def __init__(self, task, est=-1, runtime=-1, eft=-1, resource=-1):
self.task = task
self.EFT = eft
self.EST = est
self.runtime = runtime
self.resource = resource
class Resources(object):
len = -1
bandwidth = 0
def __init__(self, powers, bandwidth): # e.g. [1,1,2,2,4]
number_of_resources = len(powers)
self.power = powers
        self.tasksOfResource = []  # one ordered list of TaskSchedule objects per resource
for i in range(number_of_resources):
self.tasksOfResource.append([])
self.len = number_of_resources
self.bandwidth = bandwidth
self.job_task_schedule = {} # job_task_schedule['Mine_10_1'][4].EFT == 12
def find_gap(self, resource, start_time, runtime):
'''
finds a gap in resource and returns the start time and the place index of the task among the current tasks of the resource
if resource is -1, it does nothing (returns the given start time, and -1 for place)
'''
if resource == -1:
return start_time, -1
number_of_tasks = len(self.tasksOfResource[resource])
if number_of_tasks == 0:
return start_time, 0
elif self.tasksOfResource[resource][0].EST >= start_time + runtime:
return start_time, 0
elif number_of_tasks == 1:
if self.tasksOfResource[resource][0].EFT < start_time:
return start_time, 1
else:
return self.tasksOfResource[resource][0].EFT, 1
else:
for i in range(1, number_of_tasks):
if self.tasksOfResource[resource][i].EST <= start_time:
continue
elif start_time < self.tasksOfResource[resource][i - 1].EFT:
gap = self.tasksOfResource[resource][i].EST - self.tasksOfResource[resource][i - 1].EFT
if gap < runtime:
continue
else:
return self.tasksOfResource[resource][i - 1].EFT, i
elif self.tasksOfResource[resource][i - 1].EFT <= start_time < self.tasksOfResource[resource][i].EST:
if self.tasksOfResource[resource][i].EST - start_time < runtime:
continue
else:
return start_time, i
else: # no gap is found, put it at the end (it can be done using append method)
return max(self.tasksOfResource[resource][-1].EFT, start_time), -1
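    # Worked example (illustrative): with scheduled tasks [EST=0, EFT=5] and
    # [EST=9, EFT=12] on a resource, find_gap(resource, start_time=4, runtime=3)
    # takes the i=1 branch: the gap between EFT=5 and EST=9 is 4 >= 3, so it
    # returns (5, 1) -- start the task at t=5, insert it at place index 1.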
def calculate_eft(self, task, resource_id, arrival_time=0):
g = task.graph
if resource_id == -1:
graphs_task_on_resource = []
task_runtime_on_resource = task.weight / max(self.power)
else:
task_runtime_on_resource = task.weight / self.power[resource_id]
graphs_task_on_resource = list(
map(lambda t: t.task.id if t.task.graph.name == g.name else -1, self.tasksOfResource[resource_id]))
max_est_of_task = arrival_time
for p in task.predecessor:
# check if p and task.id on the same resource_id
if p in graphs_task_on_resource:
communication_delay = 0
else:
communication_delay = task.predecessor[p] / self.bandwidth
if g.name not in self.job_task_schedule or p not in self.job_task_schedule[g.name]:
continue
p_eft = self.job_task_schedule[g.name][p].EFT
if p_eft + communication_delay > max_est_of_task:
max_est_of_task = p_eft + communication_delay
# EST Of Task is found and stored in max_est_of_task
# Find a gap to schedule it:
start_time, place_id = self.find_gap(resource_id, max_est_of_task, task_runtime_on_resource)
eft_task = start_time + task_runtime_on_resource
return start_time, eft_task, task_runtime_on_resource, place_id
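    # Note (illustrative): the EST above is the maximum of the arrival time and
    # each predecessor's EFT plus its transfer delay (edge weight / bandwidth);
    # the delay is dropped when the predecessor ran on the same resource. E.g.
    # with bandwidth=10, a predecessor finishing at t=8 that sends 20 units of
    # data contributes 8 + 20/10 = 10 to the EST.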
def schedule(self, task_schedule, place_id=-1):
"""
Schedules a task in a place id. if place_id is -1 the schedule is appended to the last.
:type task_schedule: TaskSchedule
:type place_id: int
"""
resource = task_schedule.resource
if place_id == -1:
self.tasksOfResource[resource].append(task_schedule)
else:
self.tasksOfResource[resource].insert(place_id, task_schedule)
if task_schedule.task.graph.name in self.job_task_schedule:
pass
else:
self.job_task_schedule[task_schedule.task.graph.name] = {}
self.job_task_schedule[task_schedule.task.graph.name][task_schedule.task.id] = task_schedule
def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):
result = []
for r in range(0, self.len):
names = []
est = []
eft = []
def add_entries(x):
if job_id != -1 and x.task.graph.name != job_id:
return
names.append(x.task.id if job_id != -1 else f'{x.task.graph.name}-{x.task.id}')
est.append(x.EST)
eft.append(x.EFT)
list(map(add_entries, self.tasksOfResource[r]))
result.append((names, est, eft))
def print_list(x):
if not print_enabled:
return
first = True
for e in x:
if first:
first = False
else:
print(',', end=' ')
print(e, end=' ')
print()
print_list(names)
print_list(est)
print_list(eft)
if finishing is not None and print_enabled:
print(finishing)
return result
def write_schedule(self, db_file, test_name='N/A', extra='single', policy='', job_count=1):
w = writer.Writer(db_file)
w.create_plan()
w.create_plan_head()
unique_jobs_id = w.write_plan_head(test_name, policy, job_count)
def add_entries(x):
job_name, job_type, task_id, jobs_id, start_time,\
finish_time, resource_id, resource_speed, \
job_component_id, extra_params = x.task.graph.name, x.task.graph.type, x.task.id, unique_jobs_id\
, x.EST,\
x.EFT, r, self.power[r], policy, extra
w.write_plan(job_name, job_type, task_id, jobs_id, start_time, finish_time, resource_id,
resource_speed, job_component_id, extra_params)
for r in range(0, self.len):
list(map(add_entries, self.tasksOfResource[r]))
w.commit()
w.close()
@property
def average_power(self):
return math.fsum(self.power) / self.len
@property
def makespan(self):
eft = 0
for i in range(0, self.len):
tasks_in_resource = self.tasksOfResource[i]
if len(tasks_in_resource) == 0:
continue
eft = max(eft, tasks_in_resource[-1].EFT)
return eft
def sum_gaps_resource(self, resource_id):
tasks_in_current_resource = self.tasksOfResource[resource_id]
num_tasks = len(tasks_in_current_resource)
if num_tasks <= 1:
return 0
sum_gaps = 0
for i in range(1, num_tasks):
if tasks_in_current_resource[i - 1].task.dummy_task or tasks_in_current_resource[i].task.dummy_task:
continue
finish_prev = tasks_in_current_resource[i - 1].EFT
start_current = tasks_in_current_resource[i].EST
gap_length = start_current - finish_prev
if gap_length < 0:
raise Exception('Schedule is not correct, check gaps!')
sum_gaps += gap_length
return sum_gaps
@property
def sum_internal_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_gaps_resource(r)
return sum_gaps
def select_resource(self, task, arrival_time=0):
est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best = -1, -1, -1, -1, -1
for r in range(0, self.len):
max_est_of_task, eft_task, task_runtime_on_resource, place_id = self.calculate_eft(task, r, arrival_time=arrival_time)
if eft_best == -1 or eft_task < eft_best:
est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best = \
max_est_of_task, eft_task, task_runtime_on_resource, place_id, r
return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best
def get_fastest_empty_resource(self):
for r in range(self.len - 1, -1, -1):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
class CostAwareResources(Resources):
def __init__(self, powers, prices, timeslot_len, bandwidth):
super(CostAwareResources, self).__init__(powers, bandwidth)
self.timeslot = timeslot_len
self.price = prices
self.head_nodes = {}
self.sum_weight_scheduled = {}
def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True):
"""
computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.
:param resource_id:
:param start_time:
:param eft:
:param cost_only:
:return:
"""
tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if not t.task.dummy_task]
if not tasks_in_resource:
if eft == -1:
return 0 if cost_only else (0, 0, 0)
else:
return math.ceil((eft - start_time) / self.timeslot[resource_id]) * self.price[resource_id]
if start_time != -1:
task_start_time = min(tasks_in_resource[0].EST, start_time)
else:
task_start_time = tasks_in_resource[0].EST
task_finish_time = max(tasks_in_resource[-1].EFT, eft)
reservation = task_finish_time - task_start_time
cost = math.ceil(reservation / self.timeslot[resource_id]) * self.price[resource_id]
timeslot = self.timeslot[resource_id]
startof = [x.EST for x in tasks_in_resource]
endof = [x.EFT for x in tasks_in_resource]
if start_time != -1:
startof.append(start_time)
endof.append(eft)
startof.sort()
endof.sort()
timeslot_start = min(startof)
last_finish_time = max(endof)
current_task_id = 0
rent_periods = []
while timeslot_start < last_finish_time:
task_len = endof[current_task_id] - timeslot_start
time_slot_finish = endof[current_task_id] + (timeslot - (task_len % timeslot)) % timeslot
current_task_id += 1
if current_task_id >= len(startof):
rent_periods.append((timeslot_start, time_slot_finish))
break
if startof[current_task_id] <= time_slot_finish:
pass
else:
rent_periods.append((timeslot_start, time_slot_finish))
timeslot_start = startof[current_task_id]
        rented_time = 0
        for rp in rent_periods:
            rented_time += (rp[1] - rp[0])
        cost = rented_time / timeslot * self.price[resource_id]
if cost_only:
return cost
else:
return cost, min(startof), (max(endof))
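    # Worked example (illustrative): timeslot=10, price=2, and tasks occupying
    # [0, 4] and [25, 28]. Two rent periods are billed, [0, 10] and [25, 35],
    # i.e. 20 time units = 2 slots, so cost = 2 * 2 = 4 -- cheaper than the
    # single continuous reservation ceil(28 / 10) * 2 = 6 computed first.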
def resource_start_time(self, resource_id):
tasks_in_resource = self.tasksOfResource[resource_id]
length = len(tasks_in_resource)
start_index = 0
while length > 0 and tasks_in_resource[start_index].task.dummy_task:
start_index += 1
length -= 1
if length == 0:
return -1
return tasks_in_resource[start_index].EST
@property
def plan_cost(self):
cost = 0
for i in range(0, self.len):
cost += self.resource_cost(i)
return cost
def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft, resource_id, task_id=None):
timeslot_end = timeslot_start + self.timeslot[resource_id]
if ft <= timeslot_start or est >= timeslot_end:
return 0
tasks = self.tasksOfResource[resource_id]
task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)
sum_w = 0
for id in task_ids:
if task_id == id:
continue
start_time = tasks[id].EST
finish_time = tasks[id].EFT
if start_time < timeslot_start:
start_time = timeslot_start
if finish_time > timeslot_end:
finish_time = timeslot_end
sum_w += finish_time - start_time
if est < timeslot_start:
est = timeslot_start
if ft > timeslot_end:
ft = timeslot_end
if ft == est:
return 0
share = float(ft - est) / (sum_w + ft - est)
return share * self.price[resource_id]
def task_id_in_timeslot(self, resource_id, timeslot_start):
timeslot_end = timeslot_start + self.timeslot[resource_id]
task_ids = []
for id in range(len(self.tasksOfResource[resource_id])):
s = self.tasksOfResource[resource_id][id]
if timeslot_start <= s.EST <= timeslot_end or timeslot_start <= s.EFT <= timeslot_end \
or s.EST < timeslot_start and timeslot_end < s.EFT:
task_ids.append(id)
return task_ids
def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1, task_id=None):
if task_id is not None:
# this task has already been scheduled
est = self.tasksOfResource[resource_id][task_id].EST
ft = self.tasksOfResource[resource_id][task_id].EFT
timeslot_len = self.timeslot[resource_id]
resource_start_time = self.resource_start_time(resource_id)
if resource_start_time == -1:
resource_start_time = est
timeslot_start = float(timeslot_len) * math.floor((est - resource_start_time) /
timeslot_len) + resource_start_time
timeslot_end = float(timeslot_len) * math.ceil((ft - resource_start_time) /
timeslot_len) + resource_start_time
shared_cost = 0
for interval in f_range(timeslot_start, timeslot_end + timeslot_len / 2, timeslot_len):
share_in_interval = self.calculate_shared_cost_within_timeslot(interval, est, ft, resource_id, task_id)
shared_cost += share_in_interval
return shared_cost
def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):
"""
calculates eft and cost of a certain task on a certain resource.
:param task:Definitions.Task()
:param resource_id:
:return:
"""
start_time, eft, runtime_on_resource, place_id = self.calculate_eft(task, resource_id, arrival_time=arrival_time)
if task.dummy_task:
return start_time, eft, runtime_on_resource, place_id, 0
else:
cost = self.calculate_share_cost_change(resource_id, start_time, eft, task.graph.name, True)
return start_time, eft, runtime_on_resource, place_id, cost
def sum_external_gaps_resource(self, r):
c, s, e = self.resource_cost(r, cost_only=False)
reservation = e - s
timeslot = self.timeslot[r]
gap = timeslot - reservation % timeslot
if gap == timeslot:
return 0
else:
return gap
@property
def sum_external_gaps(self):
sum_gaps = 0
for r in range(0, self.len):
sum_gaps += self.sum_external_gaps_resource(r)
return sum_gaps
@property
def sum_gaps(self):
return self.sum_internal_gaps + self.sum_external_gaps
@property
def occupied_resources(self):
counter = 0
for i in range(self.len):
if self.resource_cost(i) != 0:
counter += self.price[i]
return counter
@property
def gap_rate(self):
return self.sum_gaps / self.makespan / self.occupied_resources
def select_resource(self, task=Task(), test=None, arrival_time=0):
eft_best = -1
def something_found():
return eft_best != -1
if task.asap is not None:
if not task.asap: # budget workflow
if not test:
print('', end='')
# fastest affordable
est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
-1, -1, -1, -1, -1, -1
for r in range(0, self.len):
start_time, eft, runtime_on_resource, place_id, cost = self.calculate_eft_and_cost(task, r, arrival_time=arrival_time)
if not something_found() or \
eft < eft_best and task.sub_deadline < eft_best or \
task.sub_budget < cost_best and eft <= task.sub_deadline and cost < cost_best or \
eft <= task.sub_deadline and cost <= task.sub_budget and \
(eft_best > task.sub_deadline or cost_best > task.sub_budget) or \
eft <= task.sub_deadline and cost <= task.sub_budget and eft < eft_best or \
eft <= task.sub_deadline and cost <= task.sub_budget and eft == eft_best and cost < cost_best:
est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
start_time, eft, runtime_on_resource, place_id, r, cost
continue
if not test:
print('', end='')
return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best, cost_best
elif task.asap: # deadline workflow
# cheapest before sub-deadline
if not test:
print('', end='')
est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
-1, -1, -1, -1, -1, -1
for r in range(0, self.len):
start_time, eft, runtime_on_resource, place_id, cost = self.calculate_eft_and_cost(task, r, arrival_time=arrival_time)
# if eft_best == -1 or eft_best > eft > task.sub_deadline or task.sub_deadline >= eft and (
# cost < cost_best or eft_best > task.sub_deadline):
if not something_found() or \
eft < eft_best and task.sub_deadline < eft_best or \
task.sub_budget < cost_best and eft <= task.sub_deadline and cost < cost_best or \
eft <= task.sub_deadline and cost <= task.sub_budget and \
(eft_best > task.sub_deadline or cost_best > task.sub_budget) or \
eft <= task.sub_deadline and cost <= task.sub_budget and cost < cost_best or \
eft <= task.sub_deadline and cost <= task.sub_budget and cost == cost_best and eft < eft_best:
est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
start_time, eft, runtime_on_resource, place_id, r, cost
# if cost_best == -1 or cost_best > cost > task.sub_budget or task.sub_budget >= cost and (
# eft < eft_best or cost_best > task.sub_budget):
# est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \
# start_time, eft, runtime_on_resource, place_id, r, cost
continue
if not test:
print('', end='')
return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best, cost_best
else:
# minimize time (as in HEFT) TODO: it doesn't return cost (as the sixth return value)
return super(CostAwareResources, self).select_resource(task)
def price_of_each_graph(self):
graph_names = self.job_task_schedule.keys()
costs = {}
for name in graph_names:
costs[name] = 0
for r in range(self.len):
for id in range(len(self.tasksOfResource[r])):
name = self.tasksOfResource[r][id].task.graph.name
cost = self.calculate_task_shared_cost(resource_id=r, task_id=id)
costs[name] += cost
return costs
def get_cheapest_empty_resource(self):
for r in range(self.len):
if len(self.tasksOfResource[r]) == 0:
return r
else:
return -1
def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):
super(CostAwareResources, self).schedule(task_schedule, place_id)
# head_node computations:
if not do_head_nodes:
return
if task_schedule.task.graph.name in self.head_nodes:
prev_heads = self.head_nodes[task_schedule.task.graph.name]
parents_of_current_task = task_schedule.task.predecessor.keys()
self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[task_schedule.task.graph.name].difference(
parents_of_current_task)
self.head_nodes[task_schedule.task.graph.name].add(task_schedule.task.id)
else:
self.head_nodes[task_schedule.task.graph.name] = set()
self.head_nodes[task_schedule.task.graph.name].add(task_schedule.task.id)
self.sum_weight_scheduled[task_schedule.task.graph.name] = 0
self.sum_weight_scheduled[task_schedule.task.graph.name] += task_schedule.task.weight
def calculate_share_cost_change(self, resource_id, est=-1, eft=-1, job_id=-1, only_this_job=False):
sum_w = {}
for i in range(len(self.tasksOfResource[resource_id])):
sch = self.tasksOfResource[resource_id][i]
job = sch.task.graph.name
if job not in sum_w:
sum_w[job] = 0
sum_w[job] += sch.EFT - sch.EST
sum_w_all_old = sum(sum_w.values())
prev_cost_resource = self.resource_cost(resource_id)
prev_cost_job = {}
for j in sum_w.keys():
if sum_w_all_old == 0:
prev_cost_job[j] = 0
else:
prev_cost_job[j] = float(prev_cost_resource) * sum_w[j] / sum_w_all_old
if est == -1:
return prev_cost_job
new_cost_resource = self.resource_cost(resource_id, start_time=est, eft=eft)
if job_id not in sum_w:
sum_w[job_id] = 0
sum_w[job_id] += eft - est
sum_w_all_new = sum_w_all_old + eft - est
new_cost_job = {}
changes = {}
for j in sum_w.keys():
if sum_w_all_new == 0:
new_cost_job[j] = 0
else:
new_cost_job[j] = float(new_cost_resource) * sum_w[j] / sum_w_all_new
if j not in prev_cost_job:
changes[j] = new_cost_job[j]
else:
changes[j] = new_cost_job[j] - prev_cost_job[j]
if only_this_job:
return changes[job_id]
return changes
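# Hedged usage sketch (all values hypothetical, not taken from the original
# code): prices and timeslot_len are per-resource lists indexed like powers,
# and the 6-tuple return of select_resource assumes task.asap has been set
# (deadline/budget mode).
#
# res = CostAwareResources(powers=[1, 2, 4], prices=[1, 2, 5],
#                          timeslot_len=[60, 60, 60], bandwidth=10)
# est, runtime, eft, r_id, place, cost = res.select_resource(task)
# res.schedule(TaskSchedule(task, est, runtime, eft, r_id), place)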
|
normal
|
{
"blob_id": "567076af26b8c93c68647103aeddf43aeb24db13",
"index": 2054,
"step-1": "<mask token>\n\n\nclass Resources(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def makespan(self):\n eft = 0\n for i in range(0, self.len):\n tasks_in_resource = self.tasksOfResource[i]\n if len(tasks_in_resource) == 0:\n continue\n eft = max(eft, tasks_in_resource[-1].EFT)\n return eft\n <mask token>\n\n @property\n def sum_internal_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_gaps_resource(r)\n return sum_gaps\n <mask token>\n <mask token>\n\n\nclass CostAwareResources(Resources):\n\n def __init__(self, powers, prices, timeslot_len, bandwidth):\n super(CostAwareResources, self).__init__(powers, bandwidth)\n self.timeslot = timeslot_len\n self.price = prices\n self.head_nodes = {}\n self.sum_weight_scheduled = {}\n\n def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True\n ):\n \"\"\"\n computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.\n :param resource_id:\n :param start_time:\n :param eft:\n :param cost_only:\n :return:\n \"\"\"\n tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if\n not t.task.dummy_task]\n if not tasks_in_resource:\n if eft == -1:\n return 0 if cost_only else (0, 0, 0)\n else:\n return math.ceil((eft - start_time) / self.timeslot[\n resource_id]) * self.price[resource_id]\n if start_time != -1:\n task_start_time = min(tasks_in_resource[0].EST, start_time)\n else:\n task_start_time = tasks_in_resource[0].EST\n task_finish_time = max(tasks_in_resource[-1].EFT, eft)\n reservation = task_finish_time - task_start_time\n cost = math.ceil(reservation / self.timeslot[resource_id]\n ) * self.price[resource_id]\n timeslot = self.timeslot[resource_id]\n startof = [x.EST for x in tasks_in_resource]\n endof = [x.EFT for x in tasks_in_resource]\n if start_time != -1:\n startof.append(start_time)\n endof.append(eft)\n startof.sort()\n endof.sort()\n timeslot_start = min(startof)\n last_finish_time = max(endof)\n current_task_id = 0\n rent_periods = []\n while timeslot_start < last_finish_time:\n task_len = endof[current_task_id] - timeslot_start\n time_slot_finish = endof[current_task_id] + (timeslot - \n task_len % timeslot) % timeslot\n current_task_id += 1\n if current_task_id >= len(startof):\n rent_periods.append((timeslot_start, time_slot_finish))\n break\n if startof[current_task_id] <= time_slot_finish:\n pass\n else:\n rent_periods.append((timeslot_start, time_slot_finish))\n timeslot_start = startof[current_task_id]\n sum = 0\n for rp in rent_periods:\n sum += rp[1] - rp[0]\n cost = sum / timeslot * self.price[resource_id]\n if cost_only:\n return cost\n else:\n return cost, min(startof), max(endof)\n\n def resource_start_time(self, resource_id):\n tasks_in_resource = self.tasksOfResource[resource_id]\n length = len(tasks_in_resource)\n start_index = 0\n while length > 0 and tasks_in_resource[start_index].task.dummy_task:\n start_index += 1\n length -= 1\n if length == 0:\n return -1\n return tasks_in_resource[start_index].EST\n\n @property\n def plan_cost(self):\n cost = 0\n for i in range(0, self.len):\n cost += self.resource_cost(i)\n return cost\n\n def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,\n resource_id, task_id=None):\n timeslot_end = timeslot_start + self.timeslot[resource_id]\n if ft <= timeslot_start or est >= timeslot_end:\n return 0\n tasks = 
self.tasksOfResource[resource_id]\n task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)\n sum_w = 0\n for id in task_ids:\n if task_id == id:\n continue\n start_time = tasks[id].EST\n finish_time = tasks[id].EFT\n if start_time < timeslot_start:\n start_time = timeslot_start\n if finish_time > timeslot_end:\n finish_time = timeslot_end\n sum_w += finish_time - start_time\n if est < timeslot_start:\n est = timeslot_start\n if ft > timeslot_end:\n ft = timeslot_end\n if ft == est:\n return 0\n share = float(ft - est) / (sum_w + ft - est)\n return share * self.price[resource_id]\n\n def task_id_in_timeslot(self, resource_id, timeslot_start):\n timeslot_end = timeslot_start + self.timeslot[resource_id]\n task_ids = []\n for id in range(len(self.tasksOfResource[resource_id])):\n s = self.tasksOfResource[resource_id][id]\n if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=\n s.EFT <= timeslot_end or s.EST < timeslot_start and \n timeslot_end < s.EFT):\n task_ids.append(id)\n return task_ids\n\n def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,\n task_id=None):\n if task_id is not None:\n est = self.tasksOfResource[resource_id][task_id].EST\n ft = self.tasksOfResource[resource_id][task_id].EFT\n timeslot_len = self.timeslot[resource_id]\n resource_start_time = self.resource_start_time(resource_id)\n if resource_start_time == -1:\n resource_start_time = est\n timeslot_start = float(timeslot_len) * math.floor((est -\n resource_start_time) / timeslot_len) + resource_start_time\n timeslot_end = float(timeslot_len) * math.ceil((ft -\n resource_start_time) / timeslot_len) + resource_start_time\n shared_cost = 0\n for interval in f_range(timeslot_start, timeslot_end + timeslot_len /\n 2, timeslot_len):\n share_in_interval = self.calculate_shared_cost_within_timeslot(\n interval, est, ft, resource_id, task_id)\n shared_cost += share_in_interval\n return shared_cost\n\n def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):\n \"\"\"\n calculates eft and cost of a certain task on a certain resource.\n :param task:Definitions.Task()\n :param resource_id:\n :return:\n \"\"\"\n start_time, eft, runtime_on_resource, place_id = self.calculate_eft(\n task, resource_id, arrival_time=arrival_time)\n if task.dummy_task:\n return start_time, eft, runtime_on_resource, place_id, 0\n else:\n cost = self.calculate_share_cost_change(resource_id, start_time,\n eft, task.graph.name, True)\n return start_time, eft, runtime_on_resource, place_id, cost\n\n def sum_external_gaps_resource(self, r):\n c, s, e = self.resource_cost(r, cost_only=False)\n reservation = e - s\n timeslot = self.timeslot[r]\n gap = timeslot - reservation % timeslot\n if gap == timeslot:\n return 0\n else:\n return gap\n\n @property\n def sum_external_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_external_gaps_resource(r)\n return sum_gaps\n\n @property\n def sum_gaps(self):\n return self.sum_internal_gaps + self.sum_external_gaps\n\n @property\n def occupied_resources(self):\n counter = 0\n for i in range(self.len):\n if self.resource_cost(i) != 0:\n counter += self.price[i]\n return counter\n\n @property\n def gap_rate(self):\n return self.sum_gaps / self.makespan / self.occupied_resources\n\n def select_resource(self, task=Task(), test=None, arrival_time=0):\n eft_best = -1\n\n def something_found():\n return eft_best != -1\n if task.asap is not None:\n if not task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n 
place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and eft <\n eft_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and eft == eft_best and cost <\n cost_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n elif task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and cost == cost_best and eft <\n eft_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n else:\n return super(CostAwareResources, self).select_resource(task)\n\n def price_of_each_graph(self):\n graph_names = self.job_task_schedule.keys()\n costs = {}\n for name in graph_names:\n costs[name] = 0\n for r in range(self.len):\n for id in range(len(self.tasksOfResource[r])):\n name = self.tasksOfResource[r][id].task.graph.name\n cost = self.calculate_task_shared_cost(resource_id=r,\n task_id=id)\n costs[name] += cost\n return costs\n\n def get_cheapest_empty_resource(self):\n for r in range(self.len):\n if len(self.tasksOfResource[r]) == 0:\n return r\n else:\n return -1\n\n def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):\n super(CostAwareResources, self).schedule(task_schedule, place_id)\n if not do_head_nodes:\n return\n if task_schedule.task.graph.name in self.head_nodes:\n prev_heads = self.head_nodes[task_schedule.task.graph.name]\n parents_of_current_task = task_schedule.task.predecessor.keys()\n self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[\n task_schedule.task.graph.name].difference(\n parents_of_current_task)\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n else:\n self.head_nodes[task_schedule.task.graph.name] = set()\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n self.sum_weight_scheduled[task_schedule.task.graph.name] = 0\n 
self.sum_weight_scheduled[task_schedule.task.graph.name\n ] += task_schedule.task.weight\n\n def calculate_share_cost_change(self, resource_id, est=-1, eft=-1,\n job_id=-1, only_this_job=False):\n sum_w = {}\n for i in range(len(self.tasksOfResource[resource_id])):\n sch = self.tasksOfResource[resource_id][i]\n job = sch.task.graph.name\n if job not in sum_w:\n sum_w[job] = 0\n sum_w[job] += sch.EFT - sch.EST\n sum_w_all_old = sum(sum_w.values())\n prev_cost_resource = self.resource_cost(resource_id)\n prev_cost_job = {}\n for j in sum_w.keys():\n if sum_w_all_old == 0:\n prev_cost_job[j] = 0\n else:\n prev_cost_job[j] = float(prev_cost_resource) * sum_w[j\n ] / sum_w_all_old\n if est == -1:\n return prev_cost_job\n new_cost_resource = self.resource_cost(resource_id, start_time=est,\n eft=eft)\n if job_id not in sum_w:\n sum_w[job_id] = 0\n sum_w[job_id] += eft - est\n sum_w_all_new = sum_w_all_old + eft - est\n new_cost_job = {}\n changes = {}\n for j in sum_w.keys():\n if sum_w_all_new == 0:\n new_cost_job[j] = 0\n else:\n new_cost_job[j] = float(new_cost_resource) * sum_w[j\n ] / sum_w_all_new\n if j not in prev_cost_job:\n changes[j] = new_cost_job[j]\n else:\n changes[j] = new_cost_job[j] - prev_cost_job[j]\n if only_this_job:\n return changes[job_id]\n return changes\n",
"step-2": "<mask token>\n\n\nclass Resources(object):\n <mask token>\n <mask token>\n\n def __init__(self, powers, bandwidth):\n number_of_resources = len(powers)\n self.power = powers\n self.tasksOfResource = []\n for i in range(number_of_resources):\n self.tasksOfResource.append([])\n self.len = number_of_resources\n self.bandwidth = bandwidth\n self.job_task_schedule = {}\n <mask token>\n\n def calculate_eft(self, task, resource_id, arrival_time=0):\n g = task.graph\n if resource_id == -1:\n graphs_task_on_resource = []\n task_runtime_on_resource = task.weight / max(self.power)\n else:\n task_runtime_on_resource = task.weight / self.power[resource_id]\n graphs_task_on_resource = list(map(lambda t: t.task.id if t.\n task.graph.name == g.name else -1, self.tasksOfResource[\n resource_id]))\n max_est_of_task = arrival_time\n for p in task.predecessor:\n if p in graphs_task_on_resource:\n communication_delay = 0\n else:\n communication_delay = task.predecessor[p] / self.bandwidth\n if (g.name not in self.job_task_schedule or p not in self.\n job_task_schedule[g.name]):\n continue\n p_eft = self.job_task_schedule[g.name][p].EFT\n if p_eft + communication_delay > max_est_of_task:\n max_est_of_task = p_eft + communication_delay\n start_time, place_id = self.find_gap(resource_id, max_est_of_task,\n task_runtime_on_resource)\n eft_task = start_time + task_runtime_on_resource\n return start_time, eft_task, task_runtime_on_resource, place_id\n <mask token>\n\n def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):\n result = []\n for r in range(0, self.len):\n names = []\n est = []\n eft = []\n\n def add_entries(x):\n if job_id != -1 and x.task.graph.name != job_id:\n return\n names.append(x.task.id if job_id != -1 else\n f'{x.task.graph.name}-{x.task.id}')\n est.append(x.EST)\n eft.append(x.EFT)\n list(map(add_entries, self.tasksOfResource[r]))\n result.append((names, est, eft))\n\n def print_list(x):\n if not print_enabled:\n return\n first = True\n for e in x:\n if first:\n first = False\n else:\n print(',', end=' ')\n print(e, end=' ')\n print()\n print_list(names)\n print_list(est)\n print_list(eft)\n if finishing is not None and print_enabled:\n print(finishing)\n return result\n <mask token>\n\n @property\n def average_power(self):\n return math.fsum(self.power) / self.len\n\n @property\n def makespan(self):\n eft = 0\n for i in range(0, self.len):\n tasks_in_resource = self.tasksOfResource[i]\n if len(tasks_in_resource) == 0:\n continue\n eft = max(eft, tasks_in_resource[-1].EFT)\n return eft\n <mask token>\n\n @property\n def sum_internal_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_gaps_resource(r)\n return sum_gaps\n\n def select_resource(self, task, arrival_time=0):\n (est_best, eft_best, runtime_on_resource_best, place_id_best,\n resource_id_best) = -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (max_est_of_task, eft_task, task_runtime_on_resource, place_id\n ) = self.calculate_eft(task, r, arrival_time=arrival_time)\n if eft_best == -1 or eft_task < eft_best:\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best) = (max_est_of_task,\n eft_task, task_runtime_on_resource, place_id, r)\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best)\n\n def get_fastest_empty_resource(self):\n for r in range(self.len - 1, -1, -1):\n if len(self.tasksOfResource[r]) == 0:\n return r\n else:\n return -1\n\n\nclass CostAwareResources(Resources):\n\n def __init__(self, powers, 
prices, timeslot_len, bandwidth):\n super(CostAwareResources, self).__init__(powers, bandwidth)\n self.timeslot = timeslot_len\n self.price = prices\n self.head_nodes = {}\n self.sum_weight_scheduled = {}\n\n def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True\n ):\n \"\"\"\n computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.\n :param resource_id:\n :param start_time:\n :param eft:\n :param cost_only:\n :return:\n \"\"\"\n tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if\n not t.task.dummy_task]\n if not tasks_in_resource:\n if eft == -1:\n return 0 if cost_only else (0, 0, 0)\n else:\n return math.ceil((eft - start_time) / self.timeslot[\n resource_id]) * self.price[resource_id]\n if start_time != -1:\n task_start_time = min(tasks_in_resource[0].EST, start_time)\n else:\n task_start_time = tasks_in_resource[0].EST\n task_finish_time = max(tasks_in_resource[-1].EFT, eft)\n reservation = task_finish_time - task_start_time\n cost = math.ceil(reservation / self.timeslot[resource_id]\n ) * self.price[resource_id]\n timeslot = self.timeslot[resource_id]\n startof = [x.EST for x in tasks_in_resource]\n endof = [x.EFT for x in tasks_in_resource]\n if start_time != -1:\n startof.append(start_time)\n endof.append(eft)\n startof.sort()\n endof.sort()\n timeslot_start = min(startof)\n last_finish_time = max(endof)\n current_task_id = 0\n rent_periods = []\n while timeslot_start < last_finish_time:\n task_len = endof[current_task_id] - timeslot_start\n time_slot_finish = endof[current_task_id] + (timeslot - \n task_len % timeslot) % timeslot\n current_task_id += 1\n if current_task_id >= len(startof):\n rent_periods.append((timeslot_start, time_slot_finish))\n break\n if startof[current_task_id] <= time_slot_finish:\n pass\n else:\n rent_periods.append((timeslot_start, time_slot_finish))\n timeslot_start = startof[current_task_id]\n sum = 0\n for rp in rent_periods:\n sum += rp[1] - rp[0]\n cost = sum / timeslot * self.price[resource_id]\n if cost_only:\n return cost\n else:\n return cost, min(startof), max(endof)\n\n def resource_start_time(self, resource_id):\n tasks_in_resource = self.tasksOfResource[resource_id]\n length = len(tasks_in_resource)\n start_index = 0\n while length > 0 and tasks_in_resource[start_index].task.dummy_task:\n start_index += 1\n length -= 1\n if length == 0:\n return -1\n return tasks_in_resource[start_index].EST\n\n @property\n def plan_cost(self):\n cost = 0\n for i in range(0, self.len):\n cost += self.resource_cost(i)\n return cost\n\n def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,\n resource_id, task_id=None):\n timeslot_end = timeslot_start + self.timeslot[resource_id]\n if ft <= timeslot_start or est >= timeslot_end:\n return 0\n tasks = self.tasksOfResource[resource_id]\n task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)\n sum_w = 0\n for id in task_ids:\n if task_id == id:\n continue\n start_time = tasks[id].EST\n finish_time = tasks[id].EFT\n if start_time < timeslot_start:\n start_time = timeslot_start\n if finish_time > timeslot_end:\n finish_time = timeslot_end\n sum_w += finish_time - start_time\n if est < timeslot_start:\n est = timeslot_start\n if ft > timeslot_end:\n ft = timeslot_end\n if ft == est:\n return 0\n share = float(ft - est) / (sum_w + ft - est)\n return share * self.price[resource_id]\n\n def task_id_in_timeslot(self, resource_id, timeslot_start):\n timeslot_end = timeslot_start + 
self.timeslot[resource_id]\n task_ids = []\n for id in range(len(self.tasksOfResource[resource_id])):\n s = self.tasksOfResource[resource_id][id]\n if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=\n s.EFT <= timeslot_end or s.EST < timeslot_start and \n timeslot_end < s.EFT):\n task_ids.append(id)\n return task_ids\n\n def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,\n task_id=None):\n if task_id is not None:\n est = self.tasksOfResource[resource_id][task_id].EST\n ft = self.tasksOfResource[resource_id][task_id].EFT\n timeslot_len = self.timeslot[resource_id]\n resource_start_time = self.resource_start_time(resource_id)\n if resource_start_time == -1:\n resource_start_time = est\n timeslot_start = float(timeslot_len) * math.floor((est -\n resource_start_time) / timeslot_len) + resource_start_time\n timeslot_end = float(timeslot_len) * math.ceil((ft -\n resource_start_time) / timeslot_len) + resource_start_time\n shared_cost = 0\n for interval in f_range(timeslot_start, timeslot_end + timeslot_len /\n 2, timeslot_len):\n share_in_interval = self.calculate_shared_cost_within_timeslot(\n interval, est, ft, resource_id, task_id)\n shared_cost += share_in_interval\n return shared_cost\n\n def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):\n \"\"\"\n calculates eft and cost of a certain task on a certain resource.\n :param task:Definitions.Task()\n :param resource_id:\n :return:\n \"\"\"\n start_time, eft, runtime_on_resource, place_id = self.calculate_eft(\n task, resource_id, arrival_time=arrival_time)\n if task.dummy_task:\n return start_time, eft, runtime_on_resource, place_id, 0\n else:\n cost = self.calculate_share_cost_change(resource_id, start_time,\n eft, task.graph.name, True)\n return start_time, eft, runtime_on_resource, place_id, cost\n\n def sum_external_gaps_resource(self, r):\n c, s, e = self.resource_cost(r, cost_only=False)\n reservation = e - s\n timeslot = self.timeslot[r]\n gap = timeslot - reservation % timeslot\n if gap == timeslot:\n return 0\n else:\n return gap\n\n @property\n def sum_external_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_external_gaps_resource(r)\n return sum_gaps\n\n @property\n def sum_gaps(self):\n return self.sum_internal_gaps + self.sum_external_gaps\n\n @property\n def occupied_resources(self):\n counter = 0\n for i in range(self.len):\n if self.resource_cost(i) != 0:\n counter += self.price[i]\n return counter\n\n @property\n def gap_rate(self):\n return self.sum_gaps / self.makespan / self.occupied_resources\n\n def select_resource(self, task=Task(), test=None, arrival_time=0):\n eft_best = -1\n\n def something_found():\n return eft_best != -1\n if task.asap is not None:\n if not task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and eft <\n eft_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and eft == eft_best and cost 
<\n cost_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n elif task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and cost == cost_best and eft <\n eft_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n else:\n return super(CostAwareResources, self).select_resource(task)\n\n def price_of_each_graph(self):\n graph_names = self.job_task_schedule.keys()\n costs = {}\n for name in graph_names:\n costs[name] = 0\n for r in range(self.len):\n for id in range(len(self.tasksOfResource[r])):\n name = self.tasksOfResource[r][id].task.graph.name\n cost = self.calculate_task_shared_cost(resource_id=r,\n task_id=id)\n costs[name] += cost\n return costs\n\n def get_cheapest_empty_resource(self):\n for r in range(self.len):\n if len(self.tasksOfResource[r]) == 0:\n return r\n else:\n return -1\n\n def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):\n super(CostAwareResources, self).schedule(task_schedule, place_id)\n if not do_head_nodes:\n return\n if task_schedule.task.graph.name in self.head_nodes:\n prev_heads = self.head_nodes[task_schedule.task.graph.name]\n parents_of_current_task = task_schedule.task.predecessor.keys()\n self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[\n task_schedule.task.graph.name].difference(\n parents_of_current_task)\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n else:\n self.head_nodes[task_schedule.task.graph.name] = set()\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n self.sum_weight_scheduled[task_schedule.task.graph.name] = 0\n self.sum_weight_scheduled[task_schedule.task.graph.name\n ] += task_schedule.task.weight\n\n def calculate_share_cost_change(self, resource_id, est=-1, eft=-1,\n job_id=-1, only_this_job=False):\n sum_w = {}\n for i in range(len(self.tasksOfResource[resource_id])):\n sch = self.tasksOfResource[resource_id][i]\n job = sch.task.graph.name\n if job not in sum_w:\n sum_w[job] = 0\n sum_w[job] += sch.EFT - sch.EST\n sum_w_all_old = sum(sum_w.values())\n prev_cost_resource = self.resource_cost(resource_id)\n prev_cost_job = {}\n for j in sum_w.keys():\n if sum_w_all_old == 0:\n prev_cost_job[j] = 0\n else:\n prev_cost_job[j] = float(prev_cost_resource) * sum_w[j\n ] / sum_w_all_old\n if est == -1:\n return prev_cost_job\n 
new_cost_resource = self.resource_cost(resource_id, start_time=est,\n eft=eft)\n if job_id not in sum_w:\n sum_w[job_id] = 0\n sum_w[job_id] += eft - est\n sum_w_all_new = sum_w_all_old + eft - est\n new_cost_job = {}\n changes = {}\n for j in sum_w.keys():\n if sum_w_all_new == 0:\n new_cost_job[j] = 0\n else:\n new_cost_job[j] = float(new_cost_resource) * sum_w[j\n ] / sum_w_all_new\n if j not in prev_cost_job:\n changes[j] = new_cost_job[j]\n else:\n changes[j] = new_cost_job[j] - prev_cost_job[j]\n if only_this_job:\n return changes[job_id]\n return changes\n",
"step-3": "<mask token>\n\n\nclass Resources(object):\n <mask token>\n <mask token>\n\n def __init__(self, powers, bandwidth):\n number_of_resources = len(powers)\n self.power = powers\n self.tasksOfResource = []\n for i in range(number_of_resources):\n self.tasksOfResource.append([])\n self.len = number_of_resources\n self.bandwidth = bandwidth\n self.job_task_schedule = {}\n\n def find_gap(self, resource, start_time, runtime):\n \"\"\"\n finds a gap in resource and returns the start time and the place index of the task among the current tasks of the resource\n if resource is -1, it does nothing (returns the given start time, and -1 for place)\n \"\"\"\n if resource == -1:\n return start_time, -1\n number_of_tasks = len(self.tasksOfResource[resource])\n if number_of_tasks == 0:\n return start_time, 0\n elif self.tasksOfResource[resource][0].EST >= start_time + runtime:\n return start_time, 0\n elif number_of_tasks == 1:\n if self.tasksOfResource[resource][0].EFT < start_time:\n return start_time, 1\n else:\n return self.tasksOfResource[resource][0].EFT, 1\n else:\n for i in range(1, number_of_tasks):\n if self.tasksOfResource[resource][i].EST <= start_time:\n continue\n elif start_time < self.tasksOfResource[resource][i - 1].EFT:\n gap = self.tasksOfResource[resource][i\n ].EST - self.tasksOfResource[resource][i - 1].EFT\n if gap < runtime:\n continue\n else:\n return self.tasksOfResource[resource][i - 1].EFT, i\n elif self.tasksOfResource[resource][i - 1\n ].EFT <= start_time < self.tasksOfResource[resource][i\n ].EST:\n if self.tasksOfResource[resource][i\n ].EST - start_time < runtime:\n continue\n else:\n return start_time, i\n else:\n return max(self.tasksOfResource[resource][-1].EFT, start_time\n ), -1\n\n def calculate_eft(self, task, resource_id, arrival_time=0):\n g = task.graph\n if resource_id == -1:\n graphs_task_on_resource = []\n task_runtime_on_resource = task.weight / max(self.power)\n else:\n task_runtime_on_resource = task.weight / self.power[resource_id]\n graphs_task_on_resource = list(map(lambda t: t.task.id if t.\n task.graph.name == g.name else -1, self.tasksOfResource[\n resource_id]))\n max_est_of_task = arrival_time\n for p in task.predecessor:\n if p in graphs_task_on_resource:\n communication_delay = 0\n else:\n communication_delay = task.predecessor[p] / self.bandwidth\n if (g.name not in self.job_task_schedule or p not in self.\n job_task_schedule[g.name]):\n continue\n p_eft = self.job_task_schedule[g.name][p].EFT\n if p_eft + communication_delay > max_est_of_task:\n max_est_of_task = p_eft + communication_delay\n start_time, place_id = self.find_gap(resource_id, max_est_of_task,\n task_runtime_on_resource)\n eft_task = start_time + task_runtime_on_resource\n return start_time, eft_task, task_runtime_on_resource, place_id\n\n def schedule(self, task_schedule, place_id=-1):\n \"\"\"\n Schedules a task in a place id. 
if place_id is -1 the schedule is appended to the last.\n :type task_schedule: TaskSchedule\n :type place_id: int\n \"\"\"\n resource = task_schedule.resource\n if place_id == -1:\n self.tasksOfResource[resource].append(task_schedule)\n else:\n self.tasksOfResource[resource].insert(place_id, task_schedule)\n if task_schedule.task.graph.name in self.job_task_schedule:\n pass\n else:\n self.job_task_schedule[task_schedule.task.graph.name] = {}\n self.job_task_schedule[task_schedule.task.graph.name][task_schedule\n .task.id] = task_schedule\n\n def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):\n result = []\n for r in range(0, self.len):\n names = []\n est = []\n eft = []\n\n def add_entries(x):\n if job_id != -1 and x.task.graph.name != job_id:\n return\n names.append(x.task.id if job_id != -1 else\n f'{x.task.graph.name}-{x.task.id}')\n est.append(x.EST)\n eft.append(x.EFT)\n list(map(add_entries, self.tasksOfResource[r]))\n result.append((names, est, eft))\n\n def print_list(x):\n if not print_enabled:\n return\n first = True\n for e in x:\n if first:\n first = False\n else:\n print(',', end=' ')\n print(e, end=' ')\n print()\n print_list(names)\n print_list(est)\n print_list(eft)\n if finishing is not None and print_enabled:\n print(finishing)\n return result\n\n def write_schedule(self, db_file, test_name='N/A', extra='single',\n policy='', job_count=1):\n w = writer.Writer(db_file)\n w.create_plan()\n w.create_plan_head()\n unique_jobs_id = w.write_plan_head(test_name, policy, job_count)\n\n def add_entries(x):\n (job_name, job_type, task_id, jobs_id, start_time, finish_time,\n resource_id, resource_speed, job_component_id, extra_params\n ) = (x.task.graph.name, x.task.graph.type, x.task.id,\n unique_jobs_id, x.EST, x.EFT, r, self.power[r], policy, extra)\n w.write_plan(job_name, job_type, task_id, jobs_id, start_time,\n finish_time, resource_id, resource_speed, job_component_id,\n extra_params)\n for r in range(0, self.len):\n list(map(add_entries, self.tasksOfResource[r]))\n w.commit()\n w.close()\n\n @property\n def average_power(self):\n return math.fsum(self.power) / self.len\n\n @property\n def makespan(self):\n eft = 0\n for i in range(0, self.len):\n tasks_in_resource = self.tasksOfResource[i]\n if len(tasks_in_resource) == 0:\n continue\n eft = max(eft, tasks_in_resource[-1].EFT)\n return eft\n\n def sum_gaps_resource(self, resource_id):\n tasks_in_current_resource = self.tasksOfResource[resource_id]\n num_tasks = len(tasks_in_current_resource)\n if num_tasks <= 1:\n return 0\n sum_gaps = 0\n for i in range(1, num_tasks):\n if tasks_in_current_resource[i - 1\n ].task.dummy_task or tasks_in_current_resource[i\n ].task.dummy_task:\n continue\n finish_prev = tasks_in_current_resource[i - 1].EFT\n start_current = tasks_in_current_resource[i].EST\n gap_length = start_current - finish_prev\n if gap_length < 0:\n raise Exception('Schedule is not correct, check gaps!')\n sum_gaps += gap_length\n return sum_gaps\n\n @property\n def sum_internal_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_gaps_resource(r)\n return sum_gaps\n\n def select_resource(self, task, arrival_time=0):\n (est_best, eft_best, runtime_on_resource_best, place_id_best,\n resource_id_best) = -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (max_est_of_task, eft_task, task_runtime_on_resource, place_id\n ) = self.calculate_eft(task, r, arrival_time=arrival_time)\n if eft_best == -1 or eft_task < eft_best:\n (est_best, eft_best, runtime_on_resource_best,\n 
place_id_best, resource_id_best) = (max_est_of_task,\n eft_task, task_runtime_on_resource, place_id, r)\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best)\n\n def get_fastest_empty_resource(self):\n for r in range(self.len - 1, -1, -1):\n if len(self.tasksOfResource[r]) == 0:\n return r\n else:\n return -1\n\n\nclass CostAwareResources(Resources):\n\n def __init__(self, powers, prices, timeslot_len, bandwidth):\n super(CostAwareResources, self).__init__(powers, bandwidth)\n self.timeslot = timeslot_len\n self.price = prices\n self.head_nodes = {}\n self.sum_weight_scheduled = {}\n\n def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True\n ):\n \"\"\"\n computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.\n :param resource_id:\n :param start_time:\n :param eft:\n :param cost_only:\n :return:\n \"\"\"\n tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if\n not t.task.dummy_task]\n if not tasks_in_resource:\n if eft == -1:\n return 0 if cost_only else (0, 0, 0)\n else:\n return math.ceil((eft - start_time) / self.timeslot[\n resource_id]) * self.price[resource_id]\n if start_time != -1:\n task_start_time = min(tasks_in_resource[0].EST, start_time)\n else:\n task_start_time = tasks_in_resource[0].EST\n task_finish_time = max(tasks_in_resource[-1].EFT, eft)\n reservation = task_finish_time - task_start_time\n cost = math.ceil(reservation / self.timeslot[resource_id]\n ) * self.price[resource_id]\n timeslot = self.timeslot[resource_id]\n startof = [x.EST for x in tasks_in_resource]\n endof = [x.EFT for x in tasks_in_resource]\n if start_time != -1:\n startof.append(start_time)\n endof.append(eft)\n startof.sort()\n endof.sort()\n timeslot_start = min(startof)\n last_finish_time = max(endof)\n current_task_id = 0\n rent_periods = []\n while timeslot_start < last_finish_time:\n task_len = endof[current_task_id] - timeslot_start\n time_slot_finish = endof[current_task_id] + (timeslot - \n task_len % timeslot) % timeslot\n current_task_id += 1\n if current_task_id >= len(startof):\n rent_periods.append((timeslot_start, time_slot_finish))\n break\n if startof[current_task_id] <= time_slot_finish:\n pass\n else:\n rent_periods.append((timeslot_start, time_slot_finish))\n timeslot_start = startof[current_task_id]\n sum = 0\n for rp in rent_periods:\n sum += rp[1] - rp[0]\n cost = sum / timeslot * self.price[resource_id]\n if cost_only:\n return cost\n else:\n return cost, min(startof), max(endof)\n\n def resource_start_time(self, resource_id):\n tasks_in_resource = self.tasksOfResource[resource_id]\n length = len(tasks_in_resource)\n start_index = 0\n while length > 0 and tasks_in_resource[start_index].task.dummy_task:\n start_index += 1\n length -= 1\n if length == 0:\n return -1\n return tasks_in_resource[start_index].EST\n\n @property\n def plan_cost(self):\n cost = 0\n for i in range(0, self.len):\n cost += self.resource_cost(i)\n return cost\n\n def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,\n resource_id, task_id=None):\n timeslot_end = timeslot_start + self.timeslot[resource_id]\n if ft <= timeslot_start or est >= timeslot_end:\n return 0\n tasks = self.tasksOfResource[resource_id]\n task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)\n sum_w = 0\n for id in task_ids:\n if task_id == id:\n continue\n start_time = tasks[id].EST\n finish_time = tasks[id].EFT\n if start_time < timeslot_start:\n start_time = 
timeslot_start\n if finish_time > timeslot_end:\n finish_time = timeslot_end\n sum_w += finish_time - start_time\n if est < timeslot_start:\n est = timeslot_start\n if ft > timeslot_end:\n ft = timeslot_end\n if ft == est:\n return 0\n share = float(ft - est) / (sum_w + ft - est)\n return share * self.price[resource_id]\n\n def task_id_in_timeslot(self, resource_id, timeslot_start):\n timeslot_end = timeslot_start + self.timeslot[resource_id]\n task_ids = []\n for id in range(len(self.tasksOfResource[resource_id])):\n s = self.tasksOfResource[resource_id][id]\n if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=\n s.EFT <= timeslot_end or s.EST < timeslot_start and \n timeslot_end < s.EFT):\n task_ids.append(id)\n return task_ids\n\n def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,\n task_id=None):\n if task_id is not None:\n est = self.tasksOfResource[resource_id][task_id].EST\n ft = self.tasksOfResource[resource_id][task_id].EFT\n timeslot_len = self.timeslot[resource_id]\n resource_start_time = self.resource_start_time(resource_id)\n if resource_start_time == -1:\n resource_start_time = est\n timeslot_start = float(timeslot_len) * math.floor((est -\n resource_start_time) / timeslot_len) + resource_start_time\n timeslot_end = float(timeslot_len) * math.ceil((ft -\n resource_start_time) / timeslot_len) + resource_start_time\n shared_cost = 0\n for interval in f_range(timeslot_start, timeslot_end + timeslot_len /\n 2, timeslot_len):\n share_in_interval = self.calculate_shared_cost_within_timeslot(\n interval, est, ft, resource_id, task_id)\n shared_cost += share_in_interval\n return shared_cost\n\n def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):\n \"\"\"\n calculates eft and cost of a certain task on a certain resource.\n :param task:Definitions.Task()\n :param resource_id:\n :return:\n \"\"\"\n start_time, eft, runtime_on_resource, place_id = self.calculate_eft(\n task, resource_id, arrival_time=arrival_time)\n if task.dummy_task:\n return start_time, eft, runtime_on_resource, place_id, 0\n else:\n cost = self.calculate_share_cost_change(resource_id, start_time,\n eft, task.graph.name, True)\n return start_time, eft, runtime_on_resource, place_id, cost\n\n def sum_external_gaps_resource(self, r):\n c, s, e = self.resource_cost(r, cost_only=False)\n reservation = e - s\n timeslot = self.timeslot[r]\n gap = timeslot - reservation % timeslot\n if gap == timeslot:\n return 0\n else:\n return gap\n\n @property\n def sum_external_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_external_gaps_resource(r)\n return sum_gaps\n\n @property\n def sum_gaps(self):\n return self.sum_internal_gaps + self.sum_external_gaps\n\n @property\n def occupied_resources(self):\n counter = 0\n for i in range(self.len):\n if self.resource_cost(i) != 0:\n counter += self.price[i]\n return counter\n\n @property\n def gap_rate(self):\n return self.sum_gaps / self.makespan / self.occupied_resources\n\n def select_resource(self, task=Task(), test=None, arrival_time=0):\n eft_best = -1\n\n def something_found():\n return eft_best != -1\n if task.asap is not None:\n if not task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < 
eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and eft <\n eft_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and eft == eft_best and cost <\n cost_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n elif task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and cost == cost_best and eft <\n eft_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n else:\n return super(CostAwareResources, self).select_resource(task)\n\n def price_of_each_graph(self):\n graph_names = self.job_task_schedule.keys()\n costs = {}\n for name in graph_names:\n costs[name] = 0\n for r in range(self.len):\n for id in range(len(self.tasksOfResource[r])):\n name = self.tasksOfResource[r][id].task.graph.name\n cost = self.calculate_task_shared_cost(resource_id=r,\n task_id=id)\n costs[name] += cost\n return costs\n\n def get_cheapest_empty_resource(self):\n for r in range(self.len):\n if len(self.tasksOfResource[r]) == 0:\n return r\n else:\n return -1\n\n def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):\n super(CostAwareResources, self).schedule(task_schedule, place_id)\n if not do_head_nodes:\n return\n if task_schedule.task.graph.name in self.head_nodes:\n prev_heads = self.head_nodes[task_schedule.task.graph.name]\n parents_of_current_task = task_schedule.task.predecessor.keys()\n self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[\n task_schedule.task.graph.name].difference(\n parents_of_current_task)\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n else:\n self.head_nodes[task_schedule.task.graph.name] = set()\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n self.sum_weight_scheduled[task_schedule.task.graph.name] = 0\n self.sum_weight_scheduled[task_schedule.task.graph.name\n ] += task_schedule.task.weight\n\n def calculate_share_cost_change(self, resource_id, est=-1, eft=-1,\n job_id=-1, only_this_job=False):\n sum_w = {}\n for i in range(len(self.tasksOfResource[resource_id])):\n sch = 
self.tasksOfResource[resource_id][i]\n job = sch.task.graph.name\n if job not in sum_w:\n sum_w[job] = 0\n sum_w[job] += sch.EFT - sch.EST\n sum_w_all_old = sum(sum_w.values())\n prev_cost_resource = self.resource_cost(resource_id)\n prev_cost_job = {}\n for j in sum_w.keys():\n if sum_w_all_old == 0:\n prev_cost_job[j] = 0\n else:\n prev_cost_job[j] = float(prev_cost_resource) * sum_w[j\n ] / sum_w_all_old\n if est == -1:\n return prev_cost_job\n new_cost_resource = self.resource_cost(resource_id, start_time=est,\n eft=eft)\n if job_id not in sum_w:\n sum_w[job_id] = 0\n sum_w[job_id] += eft - est\n sum_w_all_new = sum_w_all_old + eft - est\n new_cost_job = {}\n changes = {}\n for j in sum_w.keys():\n if sum_w_all_new == 0:\n new_cost_job[j] = 0\n else:\n new_cost_job[j] = float(new_cost_resource) * sum_w[j\n ] / sum_w_all_new\n if j not in prev_cost_job:\n changes[j] = new_cost_job[j]\n else:\n changes[j] = new_cost_job[j] - prev_cost_job[j]\n if only_this_job:\n return changes[job_id]\n return changes\n",
"step-4": "<mask token>\n\n\nclass Constraint(Enum):\n <mask token>\n <mask token>\n <mask token>\n\n\n<mask token>\n\n\nclass TaskSchedule:\n\n def __init__(self, task, est=-1, runtime=-1, eft=-1, resource=-1):\n self.task = task\n self.EFT = eft\n self.EST = est\n self.runtime = runtime\n self.resource = resource\n\n\nclass Resources(object):\n len = -1\n bandwidth = 0\n\n def __init__(self, powers, bandwidth):\n number_of_resources = len(powers)\n self.power = powers\n self.tasksOfResource = []\n for i in range(number_of_resources):\n self.tasksOfResource.append([])\n self.len = number_of_resources\n self.bandwidth = bandwidth\n self.job_task_schedule = {}\n\n def find_gap(self, resource, start_time, runtime):\n \"\"\"\n finds a gap in resource and returns the start time and the place index of the task among the current tasks of the resource\n if resource is -1, it does nothing (returns the given start time, and -1 for place)\n \"\"\"\n if resource == -1:\n return start_time, -1\n number_of_tasks = len(self.tasksOfResource[resource])\n if number_of_tasks == 0:\n return start_time, 0\n elif self.tasksOfResource[resource][0].EST >= start_time + runtime:\n return start_time, 0\n elif number_of_tasks == 1:\n if self.tasksOfResource[resource][0].EFT < start_time:\n return start_time, 1\n else:\n return self.tasksOfResource[resource][0].EFT, 1\n else:\n for i in range(1, number_of_tasks):\n if self.tasksOfResource[resource][i].EST <= start_time:\n continue\n elif start_time < self.tasksOfResource[resource][i - 1].EFT:\n gap = self.tasksOfResource[resource][i\n ].EST - self.tasksOfResource[resource][i - 1].EFT\n if gap < runtime:\n continue\n else:\n return self.tasksOfResource[resource][i - 1].EFT, i\n elif self.tasksOfResource[resource][i - 1\n ].EFT <= start_time < self.tasksOfResource[resource][i\n ].EST:\n if self.tasksOfResource[resource][i\n ].EST - start_time < runtime:\n continue\n else:\n return start_time, i\n else:\n return max(self.tasksOfResource[resource][-1].EFT, start_time\n ), -1\n\n def calculate_eft(self, task, resource_id, arrival_time=0):\n g = task.graph\n if resource_id == -1:\n graphs_task_on_resource = []\n task_runtime_on_resource = task.weight / max(self.power)\n else:\n task_runtime_on_resource = task.weight / self.power[resource_id]\n graphs_task_on_resource = list(map(lambda t: t.task.id if t.\n task.graph.name == g.name else -1, self.tasksOfResource[\n resource_id]))\n max_est_of_task = arrival_time\n for p in task.predecessor:\n if p in graphs_task_on_resource:\n communication_delay = 0\n else:\n communication_delay = task.predecessor[p] / self.bandwidth\n if (g.name not in self.job_task_schedule or p not in self.\n job_task_schedule[g.name]):\n continue\n p_eft = self.job_task_schedule[g.name][p].EFT\n if p_eft + communication_delay > max_est_of_task:\n max_est_of_task = p_eft + communication_delay\n start_time, place_id = self.find_gap(resource_id, max_est_of_task,\n task_runtime_on_resource)\n eft_task = start_time + task_runtime_on_resource\n return start_time, eft_task, task_runtime_on_resource, place_id\n\n def schedule(self, task_schedule, place_id=-1):\n \"\"\"\n Schedules a task in a place id. 
if place_id is -1 the schedule is appended to the last.\n :type task_schedule: TaskSchedule\n :type place_id: int\n \"\"\"\n resource = task_schedule.resource\n if place_id == -1:\n self.tasksOfResource[resource].append(task_schedule)\n else:\n self.tasksOfResource[resource].insert(place_id, task_schedule)\n if task_schedule.task.graph.name in self.job_task_schedule:\n pass\n else:\n self.job_task_schedule[task_schedule.task.graph.name] = {}\n self.job_task_schedule[task_schedule.task.graph.name][task_schedule\n .task.id] = task_schedule\n\n def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):\n result = []\n for r in range(0, self.len):\n names = []\n est = []\n eft = []\n\n def add_entries(x):\n if job_id != -1 and x.task.graph.name != job_id:\n return\n names.append(x.task.id if job_id != -1 else\n f'{x.task.graph.name}-{x.task.id}')\n est.append(x.EST)\n eft.append(x.EFT)\n list(map(add_entries, self.tasksOfResource[r]))\n result.append((names, est, eft))\n\n def print_list(x):\n if not print_enabled:\n return\n first = True\n for e in x:\n if first:\n first = False\n else:\n print(',', end=' ')\n print(e, end=' ')\n print()\n print_list(names)\n print_list(est)\n print_list(eft)\n if finishing is not None and print_enabled:\n print(finishing)\n return result\n\n def write_schedule(self, db_file, test_name='N/A', extra='single',\n policy='', job_count=1):\n w = writer.Writer(db_file)\n w.create_plan()\n w.create_plan_head()\n unique_jobs_id = w.write_plan_head(test_name, policy, job_count)\n\n def add_entries(x):\n (job_name, job_type, task_id, jobs_id, start_time, finish_time,\n resource_id, resource_speed, job_component_id, extra_params\n ) = (x.task.graph.name, x.task.graph.type, x.task.id,\n unique_jobs_id, x.EST, x.EFT, r, self.power[r], policy, extra)\n w.write_plan(job_name, job_type, task_id, jobs_id, start_time,\n finish_time, resource_id, resource_speed, job_component_id,\n extra_params)\n for r in range(0, self.len):\n list(map(add_entries, self.tasksOfResource[r]))\n w.commit()\n w.close()\n\n @property\n def average_power(self):\n return math.fsum(self.power) / self.len\n\n @property\n def makespan(self):\n eft = 0\n for i in range(0, self.len):\n tasks_in_resource = self.tasksOfResource[i]\n if len(tasks_in_resource) == 0:\n continue\n eft = max(eft, tasks_in_resource[-1].EFT)\n return eft\n\n def sum_gaps_resource(self, resource_id):\n tasks_in_current_resource = self.tasksOfResource[resource_id]\n num_tasks = len(tasks_in_current_resource)\n if num_tasks <= 1:\n return 0\n sum_gaps = 0\n for i in range(1, num_tasks):\n if tasks_in_current_resource[i - 1\n ].task.dummy_task or tasks_in_current_resource[i\n ].task.dummy_task:\n continue\n finish_prev = tasks_in_current_resource[i - 1].EFT\n start_current = tasks_in_current_resource[i].EST\n gap_length = start_current - finish_prev\n if gap_length < 0:\n raise Exception('Schedule is not correct, check gaps!')\n sum_gaps += gap_length\n return sum_gaps\n\n @property\n def sum_internal_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_gaps_resource(r)\n return sum_gaps\n\n def select_resource(self, task, arrival_time=0):\n (est_best, eft_best, runtime_on_resource_best, place_id_best,\n resource_id_best) = -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (max_est_of_task, eft_task, task_runtime_on_resource, place_id\n ) = self.calculate_eft(task, r, arrival_time=arrival_time)\n if eft_best == -1 or eft_task < eft_best:\n (est_best, eft_best, runtime_on_resource_best,\n 
place_id_best, resource_id_best) = (max_est_of_task,\n eft_task, task_runtime_on_resource, place_id, r)\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best)\n\n def get_fastest_empty_resource(self):\n for r in range(self.len - 1, -1, -1):\n if len(self.tasksOfResource[r]) == 0:\n return r\n else:\n return -1\n\n\nclass CostAwareResources(Resources):\n\n def __init__(self, powers, prices, timeslot_len, bandwidth):\n super(CostAwareResources, self).__init__(powers, bandwidth)\n self.timeslot = timeslot_len\n self.price = prices\n self.head_nodes = {}\n self.sum_weight_scheduled = {}\n\n def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True\n ):\n \"\"\"\n computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.\n :param resource_id:\n :param start_time:\n :param eft:\n :param cost_only:\n :return:\n \"\"\"\n tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if\n not t.task.dummy_task]\n if not tasks_in_resource:\n if eft == -1:\n return 0 if cost_only else (0, 0, 0)\n else:\n return math.ceil((eft - start_time) / self.timeslot[\n resource_id]) * self.price[resource_id]\n if start_time != -1:\n task_start_time = min(tasks_in_resource[0].EST, start_time)\n else:\n task_start_time = tasks_in_resource[0].EST\n task_finish_time = max(tasks_in_resource[-1].EFT, eft)\n reservation = task_finish_time - task_start_time\n cost = math.ceil(reservation / self.timeslot[resource_id]\n ) * self.price[resource_id]\n timeslot = self.timeslot[resource_id]\n startof = [x.EST for x in tasks_in_resource]\n endof = [x.EFT for x in tasks_in_resource]\n if start_time != -1:\n startof.append(start_time)\n endof.append(eft)\n startof.sort()\n endof.sort()\n timeslot_start = min(startof)\n last_finish_time = max(endof)\n current_task_id = 0\n rent_periods = []\n while timeslot_start < last_finish_time:\n task_len = endof[current_task_id] - timeslot_start\n time_slot_finish = endof[current_task_id] + (timeslot - \n task_len % timeslot) % timeslot\n current_task_id += 1\n if current_task_id >= len(startof):\n rent_periods.append((timeslot_start, time_slot_finish))\n break\n if startof[current_task_id] <= time_slot_finish:\n pass\n else:\n rent_periods.append((timeslot_start, time_slot_finish))\n timeslot_start = startof[current_task_id]\n sum = 0\n for rp in rent_periods:\n sum += rp[1] - rp[0]\n cost = sum / timeslot * self.price[resource_id]\n if cost_only:\n return cost\n else:\n return cost, min(startof), max(endof)\n\n def resource_start_time(self, resource_id):\n tasks_in_resource = self.tasksOfResource[resource_id]\n length = len(tasks_in_resource)\n start_index = 0\n while length > 0 and tasks_in_resource[start_index].task.dummy_task:\n start_index += 1\n length -= 1\n if length == 0:\n return -1\n return tasks_in_resource[start_index].EST\n\n @property\n def plan_cost(self):\n cost = 0\n for i in range(0, self.len):\n cost += self.resource_cost(i)\n return cost\n\n def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft,\n resource_id, task_id=None):\n timeslot_end = timeslot_start + self.timeslot[resource_id]\n if ft <= timeslot_start or est >= timeslot_end:\n return 0\n tasks = self.tasksOfResource[resource_id]\n task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)\n sum_w = 0\n for id in task_ids:\n if task_id == id:\n continue\n start_time = tasks[id].EST\n finish_time = tasks[id].EFT\n if start_time < timeslot_start:\n start_time = 
timeslot_start\n if finish_time > timeslot_end:\n finish_time = timeslot_end\n sum_w += finish_time - start_time\n if est < timeslot_start:\n est = timeslot_start\n if ft > timeslot_end:\n ft = timeslot_end\n if ft == est:\n return 0\n share = float(ft - est) / (sum_w + ft - est)\n return share * self.price[resource_id]\n\n def task_id_in_timeslot(self, resource_id, timeslot_start):\n timeslot_end = timeslot_start + self.timeslot[resource_id]\n task_ids = []\n for id in range(len(self.tasksOfResource[resource_id])):\n s = self.tasksOfResource[resource_id][id]\n if (timeslot_start <= s.EST <= timeslot_end or timeslot_start <=\n s.EFT <= timeslot_end or s.EST < timeslot_start and \n timeslot_end < s.EFT):\n task_ids.append(id)\n return task_ids\n\n def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1,\n task_id=None):\n if task_id is not None:\n est = self.tasksOfResource[resource_id][task_id].EST\n ft = self.tasksOfResource[resource_id][task_id].EFT\n timeslot_len = self.timeslot[resource_id]\n resource_start_time = self.resource_start_time(resource_id)\n if resource_start_time == -1:\n resource_start_time = est\n timeslot_start = float(timeslot_len) * math.floor((est -\n resource_start_time) / timeslot_len) + resource_start_time\n timeslot_end = float(timeslot_len) * math.ceil((ft -\n resource_start_time) / timeslot_len) + resource_start_time\n shared_cost = 0\n for interval in f_range(timeslot_start, timeslot_end + timeslot_len /\n 2, timeslot_len):\n share_in_interval = self.calculate_shared_cost_within_timeslot(\n interval, est, ft, resource_id, task_id)\n shared_cost += share_in_interval\n return shared_cost\n\n def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):\n \"\"\"\n calculates eft and cost of a certain task on a certain resource.\n :param task:Definitions.Task()\n :param resource_id:\n :return:\n \"\"\"\n start_time, eft, runtime_on_resource, place_id = self.calculate_eft(\n task, resource_id, arrival_time=arrival_time)\n if task.dummy_task:\n return start_time, eft, runtime_on_resource, place_id, 0\n else:\n cost = self.calculate_share_cost_change(resource_id, start_time,\n eft, task.graph.name, True)\n return start_time, eft, runtime_on_resource, place_id, cost\n\n def sum_external_gaps_resource(self, r):\n c, s, e = self.resource_cost(r, cost_only=False)\n reservation = e - s\n timeslot = self.timeslot[r]\n gap = timeslot - reservation % timeslot\n if gap == timeslot:\n return 0\n else:\n return gap\n\n @property\n def sum_external_gaps(self):\n sum_gaps = 0\n for r in range(0, self.len):\n sum_gaps += self.sum_external_gaps_resource(r)\n return sum_gaps\n\n @property\n def sum_gaps(self):\n return self.sum_internal_gaps + self.sum_external_gaps\n\n @property\n def occupied_resources(self):\n counter = 0\n for i in range(self.len):\n if self.resource_cost(i) != 0:\n counter += self.price[i]\n return counter\n\n @property\n def gap_rate(self):\n return self.sum_gaps / self.makespan / self.occupied_resources\n\n def select_resource(self, task=Task(), test=None, arrival_time=0):\n eft_best = -1\n\n def something_found():\n return eft_best != -1\n if task.asap is not None:\n if not task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < 
eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and eft <\n eft_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and eft == eft_best and cost <\n cost_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n elif task.asap:\n if not test:\n print('', end='')\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best\n ) = -1, -1, -1, -1, -1, -1\n for r in range(0, self.len):\n (start_time, eft, runtime_on_resource, place_id, cost) = (\n self.calculate_eft_and_cost(task, r, arrival_time=\n arrival_time))\n if (not something_found() or eft < eft_best and task.\n sub_deadline < eft_best or task.sub_budget <\n cost_best and eft <= task.sub_deadline and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and (eft_best > task.sub_deadline or\n cost_best > task.sub_budget) or eft <= task.\n sub_deadline and cost <= task.sub_budget and cost <\n cost_best or eft <= task.sub_deadline and cost <=\n task.sub_budget and cost == cost_best and eft <\n eft_best):\n (est_best, eft_best, runtime_on_resource_best,\n place_id_best, resource_id_best, cost_best) = (\n start_time, eft, runtime_on_resource, place_id,\n r, cost)\n continue\n if not test:\n print('', end='')\n return (est_best, runtime_on_resource_best, eft_best,\n resource_id_best, place_id_best, cost_best)\n else:\n return super(CostAwareResources, self).select_resource(task)\n\n def price_of_each_graph(self):\n graph_names = self.job_task_schedule.keys()\n costs = {}\n for name in graph_names:\n costs[name] = 0\n for r in range(self.len):\n for id in range(len(self.tasksOfResource[r])):\n name = self.tasksOfResource[r][id].task.graph.name\n cost = self.calculate_task_shared_cost(resource_id=r,\n task_id=id)\n costs[name] += cost\n return costs\n\n def get_cheapest_empty_resource(self):\n for r in range(self.len):\n if len(self.tasksOfResource[r]) == 0:\n return r\n else:\n return -1\n\n def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):\n super(CostAwareResources, self).schedule(task_schedule, place_id)\n if not do_head_nodes:\n return\n if task_schedule.task.graph.name in self.head_nodes:\n prev_heads = self.head_nodes[task_schedule.task.graph.name]\n parents_of_current_task = task_schedule.task.predecessor.keys()\n self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[\n task_schedule.task.graph.name].difference(\n parents_of_current_task)\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n else:\n self.head_nodes[task_schedule.task.graph.name] = set()\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule\n .task.id)\n self.sum_weight_scheduled[task_schedule.task.graph.name] = 0\n self.sum_weight_scheduled[task_schedule.task.graph.name\n ] += task_schedule.task.weight\n\n def calculate_share_cost_change(self, resource_id, est=-1, eft=-1,\n job_id=-1, only_this_job=False):\n sum_w = {}\n for i in range(len(self.tasksOfResource[resource_id])):\n sch = 
self.tasksOfResource[resource_id][i]\n job = sch.task.graph.name\n if job not in sum_w:\n sum_w[job] = 0\n sum_w[job] += sch.EFT - sch.EST\n sum_w_all_old = sum(sum_w.values())\n prev_cost_resource = self.resource_cost(resource_id)\n prev_cost_job = {}\n for j in sum_w.keys():\n if sum_w_all_old == 0:\n prev_cost_job[j] = 0\n else:\n prev_cost_job[j] = float(prev_cost_resource) * sum_w[j\n ] / sum_w_all_old\n if est == -1:\n return prev_cost_job\n new_cost_resource = self.resource_cost(resource_id, start_time=est,\n eft=eft)\n if job_id not in sum_w:\n sum_w[job_id] = 0\n sum_w[job_id] += eft - est\n sum_w_all_new = sum_w_all_old + eft - est\n new_cost_job = {}\n changes = {}\n for j in sum_w.keys():\n if sum_w_all_new == 0:\n new_cost_job[j] = 0\n else:\n new_cost_job[j] = float(new_cost_resource) * sum_w[j\n ] / sum_w_all_new\n if j not in prev_cost_job:\n changes[j] = new_cost_job[j]\n else:\n changes[j] = new_cost_job[j] - prev_cost_job[j]\n if only_this_job:\n return changes[job_id]\n return changes\n",
"step-5": "from __future__ import print_function\r\n\r\nimport math\r\n\r\nimport db\r\nfrom db import writer\r\nfrom enum import Enum\r\n\r\nfrom Definitions.Graph import Task\r\n\r\n\r\nclass Constraint(Enum):\r\n deadline = 1\r\n budget = 2\r\n none = 3\r\n\r\n\r\ndef f_range(x, y, jump):\r\n while x < y:\r\n yield x\r\n x += jump\r\n\r\n\r\nclass TaskSchedule:\r\n def __init__(self, task, est=-1, runtime=-1, eft=-1, resource=-1):\r\n self.task = task\r\n self.EFT = eft\r\n self.EST = est\r\n self.runtime = runtime\r\n self.resource = resource\r\n\r\n\r\nclass Resources(object):\r\n len = -1\r\n bandwidth = 0\r\n\r\n def __init__(self, powers, bandwidth): # e.g. [1,1,2,2,4]\r\n number_of_resources = len(powers)\r\n self.power = powers\r\n self.tasksOfResource = [] # ordered set of TaskSchedule objects in every resource\r\n for i in range(number_of_resources):\r\n self.tasksOfResource.append([])\r\n self.len = number_of_resources\r\n self.bandwidth = bandwidth\r\n self.job_task_schedule = {} # job_task_schedule['Mine_10_1'][4].EFT == 12\r\n\r\n def find_gap(self, resource, start_time, runtime):\r\n '''\r\n finds a gap in resource and returns the start time and the place index of the task among the current tasks of the resource\r\n if resource is -1, it does nothing (returns the given start time, and -1 for place)\r\n '''\r\n if resource == -1:\r\n return start_time, -1\r\n number_of_tasks = len(self.tasksOfResource[resource])\r\n if number_of_tasks == 0:\r\n return start_time, 0\r\n elif self.tasksOfResource[resource][0].EST >= start_time + runtime:\r\n return start_time, 0\r\n elif number_of_tasks == 1:\r\n if self.tasksOfResource[resource][0].EFT < start_time:\r\n return start_time, 1\r\n else:\r\n return self.tasksOfResource[resource][0].EFT, 1\r\n else:\r\n for i in range(1, number_of_tasks):\r\n if self.tasksOfResource[resource][i].EST <= start_time:\r\n continue\r\n elif start_time < self.tasksOfResource[resource][i - 1].EFT:\r\n gap = self.tasksOfResource[resource][i].EST - self.tasksOfResource[resource][i - 1].EFT\r\n if gap < runtime:\r\n continue\r\n else:\r\n return self.tasksOfResource[resource][i - 1].EFT, i\r\n elif self.tasksOfResource[resource][i - 1].EFT <= start_time < self.tasksOfResource[resource][i].EST:\r\n if self.tasksOfResource[resource][i].EST - start_time < runtime:\r\n continue\r\n else:\r\n return start_time, i\r\n else: # no gap is found, put it at the end (it can be done using append method)\r\n return max(self.tasksOfResource[resource][-1].EFT, start_time), -1\r\n\r\n def calculate_eft(self, task, resource_id, arrival_time=0):\r\n g = task.graph\r\n if resource_id == -1:\r\n graphs_task_on_resource = []\r\n task_runtime_on_resource = task.weight / max(self.power)\r\n else:\r\n task_runtime_on_resource = task.weight / self.power[resource_id]\r\n graphs_task_on_resource = list(\r\n map(lambda t: t.task.id if t.task.graph.name == g.name else -1, self.tasksOfResource[resource_id]))\r\n max_est_of_task = arrival_time\r\n for p in task.predecessor:\r\n # check if p and task.id on the same resource_id\r\n if p in graphs_task_on_resource:\r\n communication_delay = 0\r\n else:\r\n communication_delay = task.predecessor[p] / self.bandwidth\r\n if g.name not in self.job_task_schedule or p not in self.job_task_schedule[g.name]:\r\n continue\r\n p_eft = self.job_task_schedule[g.name][p].EFT\r\n if p_eft + communication_delay > max_est_of_task:\r\n max_est_of_task = p_eft + communication_delay\r\n # EST Of Task is found and stored in max_est_of_task\r\n # Find a gap to 
schedule it:\r\n start_time, place_id = self.find_gap(resource_id, max_est_of_task, task_runtime_on_resource)\r\n eft_task = start_time + task_runtime_on_resource\r\n return start_time, eft_task, task_runtime_on_resource, place_id\r\n\r\n def schedule(self, task_schedule, place_id=-1):\r\n \"\"\"\r\n Schedules a task in a place id. if place_id is -1 the schedule is appended to the last.\r\n :type task_schedule: TaskSchedule\r\n :type place_id: int\r\n \"\"\"\r\n resource = task_schedule.resource\r\n if place_id == -1:\r\n self.tasksOfResource[resource].append(task_schedule)\r\n else:\r\n self.tasksOfResource[resource].insert(place_id, task_schedule)\r\n if task_schedule.task.graph.name in self.job_task_schedule:\r\n pass\r\n else:\r\n self.job_task_schedule[task_schedule.task.graph.name] = {}\r\n self.job_task_schedule[task_schedule.task.graph.name][task_schedule.task.id] = task_schedule\r\n\r\n\r\n def show_schedule(self, job_id=-1, finishing=None, print_enabled=False):\r\n result = []\r\n for r in range(0, self.len):\r\n names = []\r\n est = []\r\n eft = []\r\n\r\n def add_entries(x):\r\n if job_id != -1 and x.task.graph.name != job_id:\r\n return\r\n names.append(x.task.id if job_id != -1 else f'{x.task.graph.name}-{x.task.id}')\r\n est.append(x.EST)\r\n eft.append(x.EFT)\r\n\r\n list(map(add_entries, self.tasksOfResource[r]))\r\n\r\n result.append((names, est, eft))\r\n\r\n def print_list(x):\r\n if not print_enabled:\r\n return\r\n first = True\r\n for e in x:\r\n if first:\r\n first = False\r\n else:\r\n print(',', end=' ')\r\n print(e, end=' ')\r\n print()\r\n\r\n print_list(names)\r\n print_list(est)\r\n print_list(eft)\r\n if finishing is not None and print_enabled:\r\n print(finishing)\r\n\r\n return result\r\n\r\n\r\n def write_schedule(self, db_file, test_name='N/A', extra='single', policy='', job_count=1):\r\n w = writer.Writer(db_file)\r\n w.create_plan()\r\n\r\n w.create_plan_head()\r\n unique_jobs_id = w.write_plan_head(test_name, policy, job_count)\r\n\r\n def add_entries(x):\r\n job_name, job_type, task_id, jobs_id, start_time,\\\r\n finish_time, resource_id, resource_speed, \\\r\n job_component_id, extra_params = x.task.graph.name, x.task.graph.type, x.task.id, unique_jobs_id\\\r\n , x.EST,\\\r\n x.EFT, r, self.power[r], policy, extra\r\n\r\n w.write_plan(job_name, job_type, task_id, jobs_id, start_time, finish_time, resource_id,\r\n resource_speed, job_component_id, extra_params)\r\n\r\n for r in range(0, self.len):\r\n list(map(add_entries, self.tasksOfResource[r]))\r\n\r\n w.commit()\r\n w.close()\r\n\r\n @property\r\n def average_power(self):\r\n return math.fsum(self.power) / self.len\r\n\r\n @property\r\n def makespan(self):\r\n eft = 0\r\n for i in range(0, self.len):\r\n tasks_in_resource = self.tasksOfResource[i]\r\n if len(tasks_in_resource) == 0:\r\n continue\r\n eft = max(eft, tasks_in_resource[-1].EFT)\r\n return eft\r\n\r\n def sum_gaps_resource(self, resource_id):\r\n tasks_in_current_resource = self.tasksOfResource[resource_id]\r\n num_tasks = len(tasks_in_current_resource)\r\n if num_tasks <= 1:\r\n return 0\r\n sum_gaps = 0\r\n for i in range(1, num_tasks):\r\n if tasks_in_current_resource[i - 1].task.dummy_task or tasks_in_current_resource[i].task.dummy_task:\r\n continue\r\n finish_prev = tasks_in_current_resource[i - 1].EFT\r\n start_current = tasks_in_current_resource[i].EST\r\n gap_length = start_current - finish_prev\r\n if gap_length < 0:\r\n raise Exception('Schedule is not correct, check gaps!')\r\n sum_gaps += gap_length\r\n return 
sum_gaps\r\n\r\n @property\r\n def sum_internal_gaps(self):\r\n sum_gaps = 0\r\n for r in range(0, self.len):\r\n sum_gaps += self.sum_gaps_resource(r)\r\n return sum_gaps\r\n\r\n def select_resource(self, task, arrival_time=0):\r\n est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best = -1, -1, -1, -1, -1\r\n for r in range(0, self.len):\r\n max_est_of_task, eft_task, task_runtime_on_resource, place_id = self.calculate_eft(task, r, arrival_time=arrival_time)\r\n if eft_best == -1 or eft_task < eft_best:\r\n est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best = \\\r\n max_est_of_task, eft_task, task_runtime_on_resource, place_id, r\r\n return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best\r\n\r\n def get_fastest_empty_resource(self):\r\n for r in range(self.len - 1, -1, -1):\r\n if len(self.tasksOfResource[r]) == 0:\r\n return r\r\n else:\r\n return -1\r\n\r\n\r\nclass CostAwareResources(Resources):\r\n def __init__(self, powers, prices, timeslot_len, bandwidth):\r\n super(CostAwareResources, self).__init__(powers, bandwidth)\r\n self.timeslot = timeslot_len\r\n self.price = prices\r\n self.head_nodes = {}\r\n self.sum_weight_scheduled = {}\r\n\r\n\r\n def resource_cost(self, resource_id, start_time=-1, eft=-1, cost_only=True):\r\n \"\"\"\r\n computes a resource's cost. if cost_only==True, only returns cost, otherwise it returns also start and finish-times.\r\n :param resource_id:\r\n :param start_time:\r\n :param eft:\r\n :param cost_only:\r\n :return:\r\n \"\"\"\r\n tasks_in_resource = [t for t in self.tasksOfResource[resource_id] if not t.task.dummy_task]\r\n if not tasks_in_resource:\r\n if eft == -1:\r\n return 0 if cost_only else (0, 0, 0)\r\n else:\r\n return math.ceil((eft - start_time) / self.timeslot[resource_id]) * self.price[resource_id]\r\n if start_time != -1:\r\n task_start_time = min(tasks_in_resource[0].EST, start_time)\r\n else:\r\n task_start_time = tasks_in_resource[0].EST\r\n task_finish_time = max(tasks_in_resource[-1].EFT, eft)\r\n reservation = task_finish_time - task_start_time\r\n cost = math.ceil(reservation / self.timeslot[resource_id]) * self.price[resource_id]\r\n\r\n\r\n timeslot = self.timeslot[resource_id]\r\n startof = [x.EST for x in tasks_in_resource]\r\n endof = [x.EFT for x in tasks_in_resource]\r\n\r\n if start_time != -1:\r\n startof.append(start_time)\r\n endof.append(eft)\r\n startof.sort()\r\n endof.sort()\r\n\r\n timeslot_start = min(startof)\r\n last_finish_time = max(endof)\r\n current_task_id = 0\r\n\r\n rent_periods = []\r\n\r\n while timeslot_start < last_finish_time:\r\n task_len = endof[current_task_id] - timeslot_start\r\n time_slot_finish = endof[current_task_id] + (timeslot - (task_len % timeslot)) % timeslot\r\n current_task_id += 1\r\n if current_task_id >= len(startof):\r\n rent_periods.append((timeslot_start, time_slot_finish))\r\n break\r\n if startof[current_task_id] <= time_slot_finish:\r\n pass\r\n else:\r\n rent_periods.append((timeslot_start, time_slot_finish))\r\n timeslot_start = startof[current_task_id]\r\n\r\n sum = 0\r\n for rp in rent_periods:\r\n sum += (rp[1] - rp[0])\r\n cost = sum / timeslot * self.price[resource_id]\r\n\r\n if cost_only:\r\n return cost\r\n else:\r\n return cost, min(startof), (max(endof))\r\n\r\n\r\n def resource_start_time(self, resource_id):\r\n tasks_in_resource = self.tasksOfResource[resource_id]\r\n length = len(tasks_in_resource)\r\n start_index = 0\r\n while length > 0 and 
tasks_in_resource[start_index].task.dummy_task:\r\n start_index += 1\r\n length -= 1\r\n if length == 0:\r\n return -1\r\n return tasks_in_resource[start_index].EST\r\n\r\n @property\r\n def plan_cost(self):\r\n cost = 0\r\n for i in range(0, self.len):\r\n cost += self.resource_cost(i)\r\n return cost\r\n\r\n def calculate_shared_cost_within_timeslot(self, timeslot_start, est, ft, resource_id, task_id=None):\r\n timeslot_end = timeslot_start + self.timeslot[resource_id]\r\n if ft <= timeslot_start or est >= timeslot_end:\r\n return 0\r\n tasks = self.tasksOfResource[resource_id]\r\n task_ids = self.task_id_in_timeslot(resource_id, timeslot_start)\r\n sum_w = 0\r\n for id in task_ids:\r\n if task_id == id:\r\n continue\r\n start_time = tasks[id].EST\r\n finish_time = tasks[id].EFT\r\n if start_time < timeslot_start:\r\n start_time = timeslot_start\r\n if finish_time > timeslot_end:\r\n finish_time = timeslot_end\r\n sum_w += finish_time - start_time\r\n if est < timeslot_start:\r\n est = timeslot_start\r\n if ft > timeslot_end:\r\n ft = timeslot_end\r\n if ft == est:\r\n return 0\r\n share = float(ft - est) / (sum_w + ft - est)\r\n return share * self.price[resource_id]\r\n\r\n def task_id_in_timeslot(self, resource_id, timeslot_start):\r\n timeslot_end = timeslot_start + self.timeslot[resource_id]\r\n task_ids = []\r\n\r\n for id in range(len(self.tasksOfResource[resource_id])):\r\n s = self.tasksOfResource[resource_id][id]\r\n if timeslot_start <= s.EST <= timeslot_end or timeslot_start <= s.EFT <= timeslot_end \\\r\n or s.EST < timeslot_start and timeslot_end < s.EFT:\r\n task_ids.append(id)\r\n return task_ids\r\n\r\n def calculate_task_shared_cost(self, est=-1, ft=-1, resource_id=-1, task_id=None):\r\n if task_id is not None:\r\n # this task has already been scheduled\r\n est = self.tasksOfResource[resource_id][task_id].EST\r\n ft = self.tasksOfResource[resource_id][task_id].EFT\r\n\r\n timeslot_len = self.timeslot[resource_id]\r\n resource_start_time = self.resource_start_time(resource_id)\r\n if resource_start_time == -1:\r\n resource_start_time = est\r\n timeslot_start = float(timeslot_len) * math.floor((est - resource_start_time) /\r\n timeslot_len) + resource_start_time\r\n timeslot_end = float(timeslot_len) * math.ceil((ft - resource_start_time) /\r\n timeslot_len) + resource_start_time\r\n shared_cost = 0\r\n for interval in f_range(timeslot_start, timeslot_end + timeslot_len / 2, timeslot_len):\r\n share_in_interval = self.calculate_shared_cost_within_timeslot(interval, est, ft, resource_id, task_id)\r\n shared_cost += share_in_interval\r\n return shared_cost\r\n\r\n\r\n def calculate_eft_and_cost(self, task, resource_id, arrival_time=0):\r\n \"\"\"\r\n calculates eft and cost of a certain task on a certain resource.\r\n :param task:Definitions.Task()\r\n :param resource_id:\r\n :return:\r\n \"\"\"\r\n start_time, eft, runtime_on_resource, place_id = self.calculate_eft(task, resource_id, arrival_time=arrival_time)\r\n if task.dummy_task:\r\n return start_time, eft, runtime_on_resource, place_id, 0\r\n else:\r\n cost = self.calculate_share_cost_change(resource_id, start_time, eft, task.graph.name, True)\r\n return start_time, eft, runtime_on_resource, place_id, cost\r\n\r\n\r\n def sum_external_gaps_resource(self, r):\r\n c, s, e = self.resource_cost(r, cost_only=False)\r\n reservation = e - s\r\n timeslot = self.timeslot[r]\r\n gap = timeslot - reservation % timeslot\r\n if gap == timeslot:\r\n return 0\r\n else:\r\n return gap\r\n\r\n @property\r\n def 
sum_external_gaps(self):\r\n sum_gaps = 0\r\n for r in range(0, self.len):\r\n sum_gaps += self.sum_external_gaps_resource(r)\r\n return sum_gaps\r\n\r\n @property\r\n def sum_gaps(self):\r\n return self.sum_internal_gaps + self.sum_external_gaps\r\n\r\n @property\r\n def occupied_resources(self):\r\n counter = 0\r\n for i in range(self.len):\r\n if self.resource_cost(i) != 0:\r\n counter += self.price[i]\r\n return counter\r\n\r\n @property\r\n def gap_rate(self):\r\n return self.sum_gaps / self.makespan / self.occupied_resources\r\n\r\n def select_resource(self, task=Task(), test=None, arrival_time=0):\r\n eft_best = -1\r\n def something_found():\r\n return eft_best != -1\r\n\r\n if task.asap is not None:\r\n if not task.asap: # budget workflow\r\n if not test:\r\n print('', end='')\r\n # fastest affordable\r\n est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \\\r\n -1, -1, -1, -1, -1, -1\r\n\r\n for r in range(0, self.len):\r\n start_time, eft, runtime_on_resource, place_id, cost = self.calculate_eft_and_cost(task, r, arrival_time=arrival_time)\r\n if not something_found() or \\\r\n eft < eft_best and task.sub_deadline < eft_best or \\\r\n task.sub_budget < cost_best and eft <= task.sub_deadline and cost < cost_best or \\\r\n eft <= task.sub_deadline and cost <= task.sub_budget and \\\r\n (eft_best > task.sub_deadline or cost_best > task.sub_budget) or \\\r\n eft <= task.sub_deadline and cost <= task.sub_budget and eft < eft_best or \\\r\n eft <= task.sub_deadline and cost <= task.sub_budget and eft == eft_best and cost < cost_best:\r\n est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \\\r\n start_time, eft, runtime_on_resource, place_id, r, cost\r\n continue\r\n if not test:\r\n print('', end='')\r\n return est_best, runtime_on_resource_best, eft_best, resource_id_best, place_id_best, cost_best\r\n elif task.asap: # deadline workflow\r\n # cheapest before sub-deadline\r\n if not test:\r\n print('', end='')\r\n est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \\\r\n -1, -1, -1, -1, -1, -1\r\n for r in range(0, self.len):\r\n start_time, eft, runtime_on_resource, place_id, cost = self.calculate_eft_and_cost(task, r, arrival_time=arrival_time)\r\n # if eft_best == -1 or eft_best > eft > task.sub_deadline or task.sub_deadline >= eft and (\r\n # cost < cost_best or eft_best > task.sub_deadline):\r\n if not something_found() or \\\r\n eft < eft_best and task.sub_deadline < eft_best or \\\r\n task.sub_budget < cost_best and eft <= task.sub_deadline and cost < cost_best or \\\r\n eft <= task.sub_deadline and cost <= task.sub_budget and \\\r\n (eft_best > task.sub_deadline or cost_best > task.sub_budget) or \\\r\n eft <= task.sub_deadline and cost <= task.sub_budget and cost < cost_best or \\\r\n eft <= task.sub_deadline and cost <= task.sub_budget and cost == cost_best and eft < eft_best:\r\n est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \\\r\n start_time, eft, runtime_on_resource, place_id, r, cost\r\n # if cost_best == -1 or cost_best > cost > task.sub_budget or task.sub_budget >= cost and (\r\n # eft < eft_best or cost_best > task.sub_budget):\r\n # est_best, eft_best, runtime_on_resource_best, place_id_best, resource_id_best, cost_best = \\\r\n # start_time, eft, runtime_on_resource, place_id, r, cost\r\n continue\r\n if not test:\r\n print('', end='')\r\n return est_best, runtime_on_resource_best, eft_best, 
resource_id_best, place_id_best, cost_best\r\n else:\r\n # minimize time (as in HEFT) TODO: it doesn't return cost (as the sixth return value)\r\n return super(CostAwareResources, self).select_resource(task)\r\n\r\n def price_of_each_graph(self):\r\n graph_names = self.job_task_schedule.keys()\r\n costs = {}\r\n for name in graph_names:\r\n costs[name] = 0\r\n for r in range(self.len):\r\n for id in range(len(self.tasksOfResource[r])):\r\n name = self.tasksOfResource[r][id].task.graph.name\r\n cost = self.calculate_task_shared_cost(resource_id=r, task_id=id)\r\n costs[name] += cost\r\n return costs\r\n\r\n def get_cheapest_empty_resource(self):\r\n for r in range(self.len):\r\n if len(self.tasksOfResource[r]) == 0:\r\n return r\r\n else:\r\n return -1\r\n\r\n def schedule(self, task_schedule, place_id=-1, do_head_nodes=False):\r\n super(CostAwareResources, self).schedule(task_schedule, place_id)\r\n\r\n # head_node computations:\r\n if not do_head_nodes:\r\n return\r\n if task_schedule.task.graph.name in self.head_nodes:\r\n prev_heads = self.head_nodes[task_schedule.task.graph.name]\r\n parents_of_current_task = task_schedule.task.predecessor.keys()\r\n self.head_nodes[task_schedule.task.graph.name] = self.head_nodes[task_schedule.task.graph.name].difference(\r\n parents_of_current_task)\r\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule.task.id)\r\n else:\r\n self.head_nodes[task_schedule.task.graph.name] = set()\r\n self.head_nodes[task_schedule.task.graph.name].add(task_schedule.task.id)\r\n self.sum_weight_scheduled[task_schedule.task.graph.name] = 0\r\n\r\n self.sum_weight_scheduled[task_schedule.task.graph.name] += task_schedule.task.weight\r\n\r\n def calculate_share_cost_change(self, resource_id, est=-1, eft=-1, job_id=-1, only_this_job=False):\r\n sum_w = {}\r\n for i in range(len(self.tasksOfResource[resource_id])):\r\n sch = self.tasksOfResource[resource_id][i]\r\n job = sch.task.graph.name\r\n if job not in sum_w:\r\n sum_w[job] = 0\r\n sum_w[job] += sch.EFT - sch.EST\r\n sum_w_all_old = sum(sum_w.values())\r\n prev_cost_resource = self.resource_cost(resource_id)\r\n prev_cost_job = {}\r\n for j in sum_w.keys():\r\n if sum_w_all_old == 0:\r\n prev_cost_job[j] = 0\r\n else:\r\n prev_cost_job[j] = float(prev_cost_resource) * sum_w[j] / sum_w_all_old\r\n if est == -1:\r\n return prev_cost_job\r\n\r\n new_cost_resource = self.resource_cost(resource_id, start_time=est, eft=eft)\r\n if job_id not in sum_w:\r\n sum_w[job_id] = 0\r\n sum_w[job_id] += eft - est\r\n sum_w_all_new = sum_w_all_old + eft - est\r\n\r\n new_cost_job = {}\r\n changes = {}\r\n for j in sum_w.keys():\r\n if sum_w_all_new == 0:\r\n new_cost_job[j] = 0\r\n else:\r\n new_cost_job[j] = float(new_cost_resource) * sum_w[j] / sum_w_all_new\r\n if j not in prev_cost_job:\r\n changes[j] = new_cost_job[j]\r\n else:\r\n changes[j] = new_cost_job[j] - prev_cost_job[j]\r\n if only_this_job:\r\n return changes[job_id]\r\n return changes\r\n",
"step-ids": [
22,
28,
32,
36,
40
]
}
|
[
22,
28,
32,
36,
40
] |
import numpy as np
import matplotlib.pyplot as plt
def sample_1(N):
numeros=np.array([-10, -5, 3, 9])
    return np.random.choice(numeros, N, p=[0.1, 0.4, 0.2, 0.3])  # returns a random sample with the given probabilities
def sample_2(N):
    return np.random.exponential(0.5, N)  # returns random numbers from an exponential distribution with beta = 0.5
def get_mean(sampling_fun, N, M):
    medias = np.zeros(M)  # array of means
    for i in range(M):  # loop to compute the M means
        medias[i] = np.mean(sampling_fun(N))
    return medias
n = np.array([10, 100, 1000])  # array with the different values of N
m = 10000  # value of M
medias_1 = np.zeros((m, 3))  # array that stores the M means for the 3 values of N with sample_1
medias_2 = np.zeros((m, 3))  # same as above, but with sample_2
texto = 'sample_'  # text I'd rather not write twice
for i in range(3):  # loop over each N
    medias_1[:, i] = get_mean(sample_1, n[i], m)
    medias_2[:, i] = get_mean(sample_2, n[i], m)
    np.savetxt(texto + '1_' + str(n[i]) + '.txt', medias_1[:, i])  # file with the M means for each N
np.savetxt(texto+'2_'+str(n[i])+'.txt',medias_2[:,i])
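
Matplotlib is imported above but never used. A minimal follow-up sketch (an addition, not part of the original script) that histograms the saved means to visualize the central limit theorem; it assumes the .txt files written by the loop above already exist:

for n_val in (10, 100, 1000):
    medias = np.loadtxt('sample_1_' + str(n_val) + '.txt')  # the M means saved for this N
    plt.hist(medias, bins=50, density=True, alpha=0.5, label='N=' + str(n_val))
plt.xlabel('sample mean')
plt.ylabel('density')
plt.legend()
plt.savefig('sample_1_means.png')  # distributions narrow around the true mean as N grows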
|
normal
|
{
"blob_id": "d2d04686b3d7f8d01ca195750ca625baa06ed098",
"index": 2835,
"step-1": "<mask token>\n\n\ndef sample_1(N):\n numeros = np.array([-10, -5, 3, 9])\n return np.random.choice(numeros, N, p=[0.1, 0.4, 0.2, 0.3])\n\n\ndef sample_2(N):\n return np.random.exponential(0.5, N)\n\n\ndef get_mean(sampling_fun, N, M):\n medias = np.zeros(M)\n for i in range(M):\n medias[i] = np.mean(sampling_fun(N))\n return medias\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef sample_1(N):\n numeros = np.array([-10, -5, 3, 9])\n return np.random.choice(numeros, N, p=[0.1, 0.4, 0.2, 0.3])\n\n\ndef sample_2(N):\n return np.random.exponential(0.5, N)\n\n\ndef get_mean(sampling_fun, N, M):\n medias = np.zeros(M)\n for i in range(M):\n medias[i] = np.mean(sampling_fun(N))\n return medias\n\n\n<mask token>\nfor i in range(3):\n medias_1[:, i] = get_mean(sample_1, n[i], m)\n medias_2[:, i] = get_mean(sample_2, n[i], m)\n np.savetxt(texto + '1_' + str(n[i]) + '.txt', medias_1[:, i])\n np.savetxt(texto + '2_' + str(n[i]) + '.txt', medias_2[:, i])\n",
"step-3": "<mask token>\n\n\ndef sample_1(N):\n numeros = np.array([-10, -5, 3, 9])\n return np.random.choice(numeros, N, p=[0.1, 0.4, 0.2, 0.3])\n\n\ndef sample_2(N):\n return np.random.exponential(0.5, N)\n\n\ndef get_mean(sampling_fun, N, M):\n medias = np.zeros(M)\n for i in range(M):\n medias[i] = np.mean(sampling_fun(N))\n return medias\n\n\nn = np.array([10, 100, 1000])\nm = 10000\nmedias_1 = np.zeros((m, 3))\nmedias_2 = np.zeros((m, 3))\ntexto = 'sample_'\nfor i in range(3):\n medias_1[:, i] = get_mean(sample_1, n[i], m)\n medias_2[:, i] = get_mean(sample_2, n[i], m)\n np.savetxt(texto + '1_' + str(n[i]) + '.txt', medias_1[:, i])\n np.savetxt(texto + '2_' + str(n[i]) + '.txt', medias_2[:, i])\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\n\n\ndef sample_1(N):\n numeros = np.array([-10, -5, 3, 9])\n return np.random.choice(numeros, N, p=[0.1, 0.4, 0.2, 0.3])\n\n\ndef sample_2(N):\n return np.random.exponential(0.5, N)\n\n\ndef get_mean(sampling_fun, N, M):\n medias = np.zeros(M)\n for i in range(M):\n medias[i] = np.mean(sampling_fun(N))\n return medias\n\n\nn = np.array([10, 100, 1000])\nm = 10000\nmedias_1 = np.zeros((m, 3))\nmedias_2 = np.zeros((m, 3))\ntexto = 'sample_'\nfor i in range(3):\n medias_1[:, i] = get_mean(sample_1, n[i], m)\n medias_2[:, i] = get_mean(sample_2, n[i], m)\n np.savetxt(texto + '1_' + str(n[i]) + '.txt', medias_1[:, i])\n np.savetxt(texto + '2_' + str(n[i]) + '.txt', medias_2[:, i])\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\n\ndef sample_1(N):\n\tnumeros=np.array([-10, -5, 3, 9])\n\treturn np.random.choice(numeros, N, p=[0.1, 0.4, 0.2, 0.3])#devuelve distro aleatoria con las probabilidades indicadas\n\ndef sample_2(N):\n\treturn np.random.exponential(0.5,N)#devuelve numeros aleatorios con distro exp con beta = 0.5\n\ndef get_mean(sampling_fun,N,M):\n\tmedias=np.zeros(M)#arreglo de medias\n\tfor i in range(M):#recorrido para sacar las m medias\n\t\tmedias[i]=np.mean(sampling_fun(N))\n\treturn medias\n\nn=np.array([10,100,1000])#arreglo con los distintos valores de n\nm=10000#valor de M\nmedias_1=np.zeros((m,3))#arreglo que guarta las m medias para 3 enes de sample1\nmedias_2=np.zeros((m,3))#lo de arriba pero con sample 2\ntexto='sample_'#texto que me da pereza escribir dos veces\nfor i in range(3):#recorrido para cada n\n\tmedias_1[:,i]=get_mean(sample_1,n[i],m)\n\tmedias_2[:,i]=get_mean(sample_2,n[i],m)\n\tnp.savetxt(texto+'1_'+str(n[i])+'.txt',medias_1[:,i])#archivo con las m medias para cada n\n\tnp.savetxt(texto+'2_'+str(n[i])+'.txt',medias_2[:,i])\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
"""
OBJECTIVE: Given a list, sort it from low to high using the QUICK SORT algorithm
Quicksort first divides a large array into two smaller sub-arrays: the low elements and the high elements.
Quicksort can then recursively sort the sub-arrays.
The steps are:
1. Pick an element, called a pivot, from the array.
2. Partitioning: reorder the array so that all elements with values less than the pivot come before the pivot,
while all elements with values greater than the pivot come after it (equal values can go either way).
After this partitioning, the pivot is in its final position. This is called the partition operation.
3. Recursively apply the above steps to the sub-array of elements with smaller values
and separately to the sub-array of elements with greater values.
The base case of the recursion is arrays of size zero or one, which are in order by definition,
so they never need to be sorted.
https://www.geeksforgeeks.org/quick-sort/
"""
def quick_sort(array: list) -> list:
return []
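
The stub above is left empty for the reader. A hedged reference solution (under a different name, so the exercise stub stays intact) that follows the three documented steps, with the last element as pivot and equal values going to the low side:

def quick_sort_solution(array: list) -> list:
    # Base case of the recursion: arrays of size zero or one are already sorted.
    if len(array) <= 1:
        return array
    pivot = array[-1]                               # 1. pick a pivot
    lows = [x for x in array[:-1] if x <= pivot]    # 2. partition: values <= pivot
    highs = [x for x in array[:-1] if x > pivot]    #    values > pivot
    # 3. recurse on both partitions and join around the pivot
    return quick_sort_solution(lows) + [pivot] + quick_sort_solution(highs)

assert quick_sort_solution([9, 1, 8, 1, 7]) == [1, 1, 7, 8, 9]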
|
normal
|
{
"blob_id": "04099c46c029af37a08b3861809da13b3cc3153b",
"index": 997,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef quick_sort(array: list) ->list:\n return []\n",
"step-3": "\"\"\"\nOBJECTIVE: Given a list, sort it from low to high using the QUICK SORT algorithm\n\nQuicksort first divides a large array into two smaller sub-arrays: the low elements and the high elements.\nQuicksort can then recursively sort the sub-arrays.\n\nThe steps are:\n\n1. Pick an element, called a pivot, from the array.\n2. Partitioning: reorder the array so that all elements with values less than the pivot come before the pivot,\n while all elements with values greater than the pivot come after it (equal values can go either way).\n After this partitioning, the pivot is in its final position. This is called the partition operation.\n3. Recursively apply the above steps to the sub-array of elements with smaller values\n and separately to the sub-array of elements with greater values.\n\nThe base case of the recursion is arrays of size zero or one, which are in order by definition,\n so they never need to be sorted.\n\nhttps://www.geeksforgeeks.org/quick-sort/\n\"\"\"\n\n\ndef quick_sort(array: list) -> list:\n return []\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from django.urls import path
from django.views.decorators.csrf import csrf_exempt
from .views import TestView, index, setup_fraud_detection, verify_testing_works
urlpatterns = [
path('test/<str:name>/', index, name='index'),
path('ml/setup/', setup_fraud_detection, name='fraud_detection_setup'),
path('ml/verify/', verify_testing_works, name='fraud_verification'),
path('class/<str:name>/', csrf_exempt(TestView.as_view()), name='test_class'),
# path('mine/', MyView.as_view(), name='my-view'),
]
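
The view callables above are imported from .views but not shown; a hedged sketch of what they could look like (the bodies, and anything beyond the imported names, are assumptions):

# views.py (hypothetical bodies consistent with the routes above)
from django.http import HttpResponse, JsonResponse
from django.views import View

def index(request, name):
    return HttpResponse('Hello, %s!' % name)

def setup_fraud_detection(request):
    return JsonResponse({'status': 'setup triggered'})  # placeholder

def verify_testing_works(request):
    return JsonResponse({'status': 'ok'})  # placeholder

class TestView(View):
    def get(self, request, name):
        return JsonResponse({'name': name})
    def post(self, request, name):
        # csrf_exempt in urls.py allows external POSTs here without a CSRF token
        return JsonResponse({'received': name})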
|
normal
|
{
"blob_id": "263347d1d445643f9c84e36a8cbb5304581ebaf6",
"index": 3888,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [path('test/<str:name>/', index, name='index'), path(\n 'ml/setup/', setup_fraud_detection, name='fraud_detection_setup'), path\n ('ml/verify/', verify_testing_works, name='fraud_verification'), path(\n 'class/<str:name>/', csrf_exempt(TestView.as_view()), name='test_class')]\n",
"step-3": "from django.urls import path\nfrom django.views.decorators.csrf import csrf_exempt\nfrom .views import TestView, index, setup_fraud_detection, verify_testing_works\nurlpatterns = [path('test/<str:name>/', index, name='index'), path(\n 'ml/setup/', setup_fraud_detection, name='fraud_detection_setup'), path\n ('ml/verify/', verify_testing_works, name='fraud_verification'), path(\n 'class/<str:name>/', csrf_exempt(TestView.as_view()), name='test_class')]\n",
"step-4": "from django.urls import path\nfrom django.views.decorators.csrf import csrf_exempt\n\nfrom .views import TestView, index, setup_fraud_detection, verify_testing_works\n\nurlpatterns = [\n path('test/<str:name>/', index, name='index'),\n path('ml/setup/', setup_fraud_detection, name='fraud_detection_setup'),\n path('ml/verify/', verify_testing_works, name='fraud_verification'),\n path('class/<str:name>/', csrf_exempt(TestView.as_view()), name='test_class'),\n # path('mine/', MyView.as_view(), name='my-view'),\n]",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v1/proto/services/user_interest_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v1.proto.resources import user_interest_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v1/proto/services/user_interest_service.proto',
package='google.ads.googleads.v1.services',
syntax='proto3',
serialized_options=_b('\n$com.google.ads.googleads.v1.servicesB\030UserInterestServiceProtoP\001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services\242\002\003GAA\252\002 Google.Ads.GoogleAds.V1.Services\312\002 Google\\Ads\\GoogleAds\\V1\\Services\352\002$Google::Ads::GoogleAds::V1::Services'),
serialized_pb=_b('\nBgoogle/ads/googleads_v1/proto/services/user_interest_service.proto\x12 google.ads.googleads.v1.services\x1a;google/ads/googleads_v1/proto/resources/user_interest.proto\x1a\x1cgoogle/api/annotations.proto\"/\n\x16GetUserInterestRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t2\xcd\x01\n\x13UserInterestService\x12\xb5\x01\n\x0fGetUserInterest\x12\x38.google.ads.googleads.v1.services.GetUserInterestRequest\x1a/.google.ads.googleads.v1.resources.UserInterest\"7\x82\xd3\xe4\x93\x02\x31\x12//v1/{resource_name=customers/*/userInterests/*}B\xff\x01\n$com.google.ads.googleads.v1.servicesB\x18UserInterestServiceProtoP\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services\xa2\x02\x03GAA\xaa\x02 Google.Ads.GoogleAds.V1.Services\xca\x02 Google\\Ads\\GoogleAds\\V1\\Services\xea\x02$Google::Ads::GoogleAds::V1::Servicesb\x06proto3')
,
dependencies=[google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_GETUSERINTERESTREQUEST = _descriptor.Descriptor(
name='GetUserInterestRequest',
full_name='google.ads.googleads.v1.services.GetUserInterestRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v1.services.GetUserInterestRequest.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=195,
serialized_end=242,
)
DESCRIPTOR.message_types_by_name['GetUserInterestRequest'] = _GETUSERINTERESTREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetUserInterestRequest = _reflection.GeneratedProtocolMessageType('GetUserInterestRequest', (_message.Message,), dict(
DESCRIPTOR = _GETUSERINTERESTREQUEST,
__module__ = 'google.ads.googleads_v1.proto.services.user_interest_service_pb2'
,
__doc__ = """Request message for
[UserInterestService.GetUserInterest][google.ads.googleads.v1.services.UserInterestService.GetUserInterest].
Attributes:
resource_name:
Resource name of the UserInterest to fetch.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v1.services.GetUserInterestRequest)
))
_sym_db.RegisterMessage(GetUserInterestRequest)
DESCRIPTOR._options = None
_USERINTERESTSERVICE = _descriptor.ServiceDescriptor(
name='UserInterestService',
full_name='google.ads.googleads.v1.services.UserInterestService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=245,
serialized_end=450,
methods=[
_descriptor.MethodDescriptor(
name='GetUserInterest',
full_name='google.ads.googleads.v1.services.UserInterestService.GetUserInterest',
index=0,
containing_service=None,
input_type=_GETUSERINTERESTREQUEST,
output_type=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2._USERINTEREST,
serialized_options=_b('\202\323\344\223\0021\022//v1/{resource_name=customers/*/userInterests/*}'),
),
])
_sym_db.RegisterServiceDescriptor(_USERINTERESTSERVICE)
DESCRIPTOR.services_by_name['UserInterestService'] = _USERINTERESTSERVICE
# @@protoc_insertion_point(module_scope)
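
A hedged usage sketch for the generated request message; the customer and criterion ids below are made up, while the resource-name pattern comes from the HTTP rule in the service definition above:

request = GetUserInterestRequest(resource_name='customers/1234567890/userInterests/92948')
data = request.SerializeToString()                  # standard protobuf message API
roundtrip = GetUserInterestRequest.FromString(data)
assert roundtrip.resource_name == request.resource_name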
|
normal
|
{
"blob_id": "654586443e96f84aae70b3ce3263b0458a27334b",
"index": 473,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n_sym_db.RegisterFileDescriptor(DESCRIPTOR)\n<mask token>\n_sym_db.RegisterMessage(GetUserInterestRequest)\n<mask token>\n_sym_db.RegisterServiceDescriptor(_USERINTERESTSERVICE)\n<mask token>\n",
"step-3": "<mask token>\n_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode('latin1')\n )\n<mask token>\n_sym_db = _symbol_database.Default()\n<mask token>\nDESCRIPTOR = _descriptor.FileDescriptor(name=\n 'google/ads/googleads_v1/proto/services/user_interest_service.proto',\n package='google.ads.googleads.v1.services', syntax='proto3',\n serialized_options=_b(\n \"\"\"\n$com.google.ads.googleads.v1.servicesB\u0018UserInterestServiceProtoP\u0001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services¢\u0002\u0003GAAª\u0002 Google.Ads.GoogleAds.V1.ServicesÊ\u0002 Google\\\\Ads\\\\GoogleAds\\\\V1\\\\Servicesê\u0002$Google::Ads::GoogleAds::V1::Services\"\"\"\n ), serialized_pb=_b(\n '\\nBgoogle/ads/googleads_v1/proto/services/user_interest_service.proto\\x12 google.ads.googleads.v1.services\\x1a;google/ads/googleads_v1/proto/resources/user_interest.proto\\x1a\\x1cgoogle/api/annotations.proto\"/\\n\\x16GetUserInterestRequest\\x12\\x15\\n\\rresource_name\\x18\\x01 \\x01(\\t2Í\\x01\\n\\x13UserInterestService\\x12µ\\x01\\n\\x0fGetUserInterest\\x128.google.ads.googleads.v1.services.GetUserInterestRequest\\x1a/.google.ads.googleads.v1.resources.UserInterest\"7\\x82Óä\\x93\\x021\\x12//v1/{resource_name=customers/*/userInterests/*}Bÿ\\x01\\n$com.google.ads.googleads.v1.servicesB\\x18UserInterestServiceProtoP\\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services¢\\x02\\x03GAAª\\x02 Google.Ads.GoogleAds.V1.ServicesÊ\\x02 Google\\\\Ads\\\\GoogleAds\\\\V1\\\\Servicesê\\x02$Google::Ads::GoogleAds::V1::Servicesb\\x06proto3'\n ), dependencies=[\n google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2\n .DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR])\n_GETUSERINTERESTREQUEST = _descriptor.Descriptor(name=\n 'GetUserInterestRequest', full_name=\n 'google.ads.googleads.v1.services.GetUserInterestRequest', filename=\n None, file=DESCRIPTOR, containing_type=None, fields=[_descriptor.\n FieldDescriptor(name='resource_name', full_name=\n 'google.ads.googleads.v1.services.GetUserInterestRequest.resource_name',\n index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False,\n default_value=_b('').decode('utf-8'), message_type=None, enum_type=None,\n containing_type=None, is_extension=False, extension_scope=None,\n serialized_options=None, file=DESCRIPTOR)], extensions=[], nested_types\n =[], enum_types=[], serialized_options=None, is_extendable=False,\n syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=195,\n serialized_end=242)\nDESCRIPTOR.message_types_by_name['GetUserInterestRequest'\n ] = _GETUSERINTERESTREQUEST\n_sym_db.RegisterFileDescriptor(DESCRIPTOR)\nGetUserInterestRequest = _reflection.GeneratedProtocolMessageType(\n 'GetUserInterestRequest', (_message.Message,), dict(DESCRIPTOR=\n _GETUSERINTERESTREQUEST, __module__=\n 'google.ads.googleads_v1.proto.services.user_interest_service_pb2',\n __doc__=\n \"\"\"Request message for\n [UserInterestService.GetUserInterest][google.ads.googleads.v1.services.UserInterestService.GetUserInterest].\n \n \n Attributes:\n resource_name:\n Resource name of the UserInterest to fetch.\n \"\"\"\n ))\n_sym_db.RegisterMessage(GetUserInterestRequest)\nDESCRIPTOR._options = None\n_USERINTERESTSERVICE = _descriptor.ServiceDescriptor(name=\n 'UserInterestService', full_name=\n 'google.ads.googleads.v1.services.UserInterestService', file=DESCRIPTOR,\n index=0, serialized_options=None, serialized_start=245, serialized_end=\n 450, 
methods=[_descriptor.MethodDescriptor(name='GetUserInterest',\n full_name=\n 'google.ads.googleads.v1.services.UserInterestService.GetUserInterest',\n index=0, containing_service=None, input_type=_GETUSERINTERESTREQUEST,\n output_type=\n google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2\n ._USERINTEREST, serialized_options=_b(\n '\\x82Óä\\x93\\x021\\x12//v1/{resource_name=customers/*/userInterests/*}'))])\n_sym_db.RegisterServiceDescriptor(_USERINTERESTSERVICE)\nDESCRIPTOR.services_by_name['UserInterestService'] = _USERINTERESTSERVICE\n",
"step-4": "import sys\n_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode('latin1')\n )\nfrom google.protobuf import descriptor as _descriptor\nfrom google.protobuf import message as _message\nfrom google.protobuf import reflection as _reflection\nfrom google.protobuf import symbol_database as _symbol_database\n_sym_db = _symbol_database.Default()\nfrom google.ads.google_ads.v1.proto.resources import user_interest_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2\nfrom google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2\nDESCRIPTOR = _descriptor.FileDescriptor(name=\n 'google/ads/googleads_v1/proto/services/user_interest_service.proto',\n package='google.ads.googleads.v1.services', syntax='proto3',\n serialized_options=_b(\n \"\"\"\n$com.google.ads.googleads.v1.servicesB\u0018UserInterestServiceProtoP\u0001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services¢\u0002\u0003GAAª\u0002 Google.Ads.GoogleAds.V1.ServicesÊ\u0002 Google\\\\Ads\\\\GoogleAds\\\\V1\\\\Servicesê\u0002$Google::Ads::GoogleAds::V1::Services\"\"\"\n ), serialized_pb=_b(\n '\\nBgoogle/ads/googleads_v1/proto/services/user_interest_service.proto\\x12 google.ads.googleads.v1.services\\x1a;google/ads/googleads_v1/proto/resources/user_interest.proto\\x1a\\x1cgoogle/api/annotations.proto\"/\\n\\x16GetUserInterestRequest\\x12\\x15\\n\\rresource_name\\x18\\x01 \\x01(\\t2Í\\x01\\n\\x13UserInterestService\\x12µ\\x01\\n\\x0fGetUserInterest\\x128.google.ads.googleads.v1.services.GetUserInterestRequest\\x1a/.google.ads.googleads.v1.resources.UserInterest\"7\\x82Óä\\x93\\x021\\x12//v1/{resource_name=customers/*/userInterests/*}Bÿ\\x01\\n$com.google.ads.googleads.v1.servicesB\\x18UserInterestServiceProtoP\\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services¢\\x02\\x03GAAª\\x02 Google.Ads.GoogleAds.V1.ServicesÊ\\x02 Google\\\\Ads\\\\GoogleAds\\\\V1\\\\Servicesê\\x02$Google::Ads::GoogleAds::V1::Servicesb\\x06proto3'\n ), dependencies=[\n google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2\n .DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR])\n_GETUSERINTERESTREQUEST = _descriptor.Descriptor(name=\n 'GetUserInterestRequest', full_name=\n 'google.ads.googleads.v1.services.GetUserInterestRequest', filename=\n None, file=DESCRIPTOR, containing_type=None, fields=[_descriptor.\n FieldDescriptor(name='resource_name', full_name=\n 'google.ads.googleads.v1.services.GetUserInterestRequest.resource_name',\n index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False,\n default_value=_b('').decode('utf-8'), message_type=None, enum_type=None,\n containing_type=None, is_extension=False, extension_scope=None,\n serialized_options=None, file=DESCRIPTOR)], extensions=[], nested_types\n =[], enum_types=[], serialized_options=None, is_extendable=False,\n syntax='proto3', extension_ranges=[], oneofs=[], serialized_start=195,\n serialized_end=242)\nDESCRIPTOR.message_types_by_name['GetUserInterestRequest'\n ] = _GETUSERINTERESTREQUEST\n_sym_db.RegisterFileDescriptor(DESCRIPTOR)\nGetUserInterestRequest = _reflection.GeneratedProtocolMessageType(\n 'GetUserInterestRequest', (_message.Message,), dict(DESCRIPTOR=\n _GETUSERINTERESTREQUEST, __module__=\n 'google.ads.googleads_v1.proto.services.user_interest_service_pb2',\n __doc__=\n \"\"\"Request message for\n [UserInterestService.GetUserInterest][google.ads.googleads.v1.services.UserInterestService.GetUserInterest].\n \n \n Attributes:\n 
resource_name:\n Resource name of the UserInterest to fetch.\n \"\"\"\n ))\n_sym_db.RegisterMessage(GetUserInterestRequest)\nDESCRIPTOR._options = None\n_USERINTERESTSERVICE = _descriptor.ServiceDescriptor(name=\n 'UserInterestService', full_name=\n 'google.ads.googleads.v1.services.UserInterestService', file=DESCRIPTOR,\n index=0, serialized_options=None, serialized_start=245, serialized_end=\n 450, methods=[_descriptor.MethodDescriptor(name='GetUserInterest',\n full_name=\n 'google.ads.googleads.v1.services.UserInterestService.GetUserInterest',\n index=0, containing_service=None, input_type=_GETUSERINTERESTREQUEST,\n output_type=\n google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2\n ._USERINTEREST, serialized_options=_b(\n '\\x82Óä\\x93\\x021\\x12//v1/{resource_name=customers/*/userInterests/*}'))])\n_sym_db.RegisterServiceDescriptor(_USERINTERESTSERVICE)\nDESCRIPTOR.services_by_name['UserInterestService'] = _USERINTERESTSERVICE\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by the protocol buffer compiler. DO NOT EDIT!\n# source: google/ads/googleads_v1/proto/services/user_interest_service.proto\n\nimport sys\n_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))\nfrom google.protobuf import descriptor as _descriptor\nfrom google.protobuf import message as _message\nfrom google.protobuf import reflection as _reflection\nfrom google.protobuf import symbol_database as _symbol_database\n# @@protoc_insertion_point(imports)\n\n_sym_db = _symbol_database.Default()\n\n\nfrom google.ads.google_ads.v1.proto.resources import user_interest_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2\nfrom google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2\n\n\nDESCRIPTOR = _descriptor.FileDescriptor(\n name='google/ads/googleads_v1/proto/services/user_interest_service.proto',\n package='google.ads.googleads.v1.services',\n syntax='proto3',\n serialized_options=_b('\\n$com.google.ads.googleads.v1.servicesB\\030UserInterestServiceProtoP\\001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services\\242\\002\\003GAA\\252\\002 Google.Ads.GoogleAds.V1.Services\\312\\002 Google\\\\Ads\\\\GoogleAds\\\\V1\\\\Services\\352\\002$Google::Ads::GoogleAds::V1::Services'),\n serialized_pb=_b('\\nBgoogle/ads/googleads_v1/proto/services/user_interest_service.proto\\x12 google.ads.googleads.v1.services\\x1a;google/ads/googleads_v1/proto/resources/user_interest.proto\\x1a\\x1cgoogle/api/annotations.proto\\\"/\\n\\x16GetUserInterestRequest\\x12\\x15\\n\\rresource_name\\x18\\x01 \\x01(\\t2\\xcd\\x01\\n\\x13UserInterestService\\x12\\xb5\\x01\\n\\x0fGetUserInterest\\x12\\x38.google.ads.googleads.v1.services.GetUserInterestRequest\\x1a/.google.ads.googleads.v1.resources.UserInterest\\\"7\\x82\\xd3\\xe4\\x93\\x02\\x31\\x12//v1/{resource_name=customers/*/userInterests/*}B\\xff\\x01\\n$com.google.ads.googleads.v1.servicesB\\x18UserInterestServiceProtoP\\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v1/services;services\\xa2\\x02\\x03GAA\\xaa\\x02 Google.Ads.GoogleAds.V1.Services\\xca\\x02 Google\\\\Ads\\\\GoogleAds\\\\V1\\\\Services\\xea\\x02$Google::Ads::GoogleAds::V1::Servicesb\\x06proto3')\n ,\n dependencies=[google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])\n\n\n\n\n_GETUSERINTERESTREQUEST = _descriptor.Descriptor(\n name='GetUserInterestRequest',\n full_name='google.ads.googleads.v1.services.GetUserInterestRequest',\n filename=None,\n file=DESCRIPTOR,\n containing_type=None,\n fields=[\n _descriptor.FieldDescriptor(\n name='resource_name', full_name='google.ads.googleads.v1.services.GetUserInterestRequest.resource_name', index=0,\n number=1, type=9, cpp_type=9, label=1,\n has_default_value=False, default_value=_b(\"\").decode('utf-8'),\n message_type=None, enum_type=None, containing_type=None,\n is_extension=False, extension_scope=None,\n serialized_options=None, file=DESCRIPTOR),\n ],\n extensions=[\n ],\n nested_types=[],\n enum_types=[\n ],\n serialized_options=None,\n is_extendable=False,\n syntax='proto3',\n extension_ranges=[],\n oneofs=[\n ],\n serialized_start=195,\n serialized_end=242,\n)\n\nDESCRIPTOR.message_types_by_name['GetUserInterestRequest'] = _GETUSERINTERESTREQUEST\n_sym_db.RegisterFileDescriptor(DESCRIPTOR)\n\nGetUserInterestRequest = _reflection.GeneratedProtocolMessageType('GetUserInterestRequest', (_message.Message,), dict(\n 
DESCRIPTOR = _GETUSERINTERESTREQUEST,\n __module__ = 'google.ads.googleads_v1.proto.services.user_interest_service_pb2'\n ,\n __doc__ = \"\"\"Request message for\n [UserInterestService.GetUserInterest][google.ads.googleads.v1.services.UserInterestService.GetUserInterest].\n \n \n Attributes:\n resource_name:\n Resource name of the UserInterest to fetch.\n \"\"\",\n # @@protoc_insertion_point(class_scope:google.ads.googleads.v1.services.GetUserInterestRequest)\n ))\n_sym_db.RegisterMessage(GetUserInterestRequest)\n\n\nDESCRIPTOR._options = None\n\n_USERINTERESTSERVICE = _descriptor.ServiceDescriptor(\n name='UserInterestService',\n full_name='google.ads.googleads.v1.services.UserInterestService',\n file=DESCRIPTOR,\n index=0,\n serialized_options=None,\n serialized_start=245,\n serialized_end=450,\n methods=[\n _descriptor.MethodDescriptor(\n name='GetUserInterest',\n full_name='google.ads.googleads.v1.services.UserInterestService.GetUserInterest',\n index=0,\n containing_service=None,\n input_type=_GETUSERINTERESTREQUEST,\n output_type=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_user__interest__pb2._USERINTEREST,\n serialized_options=_b('\\202\\323\\344\\223\\0021\\022//v1/{resource_name=customers/*/userInterests/*}'),\n ),\n])\n_sym_db.RegisterServiceDescriptor(_USERINTERESTSERVICE)\n\nDESCRIPTOR.services_by_name['UserInterestService'] = _USERINTERESTSERVICE\n\n# @@protoc_insertion_point(module_scope)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from modeller import *
from modeller.automodel import *
# This part was within the script loop_modelling_2
# Here is is in a separate file for loop_modelling_3 so the script can be run in parallel
class MyLoop(dopehr_loopmodel):
def select_atoms(self):
	    # Only the second loop's atoms are allowed to move, so we do not disturb the first loop we previously refined
return selection(self.residue_range('218:', '231:'))
def select_loop_atoms(self):
return selection(self.residue_range('218:', '231:'))
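
A hedged driver sketch, assuming Modeller's usual loop-refinement workflow; the file names and model counts below are placeholders, not taken from the original script:

env = environ()
m = MyLoop(env,
           inimodel='model_with_loop1_refined.pdb',  # starting model (placeholder name)
           sequence='target')                        # sequence code (placeholder)
m.loop.starting_model = 1
m.loop.ending_model = 10          # number of candidate loop models to build
m.loop.md_level = refine.slow     # thorough loop optimization
m.make()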
|
normal
|
{
"blob_id": "d058c3df8513e07e4ff7035aa5c5885819e43687",
"index": 7295,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass MyLoop(dopehr_loopmodel):\n <mask token>\n\n def select_loop_atoms(self):\n return selection(self.residue_range('218:', '231:'))\n",
"step-3": "<mask token>\n\n\nclass MyLoop(dopehr_loopmodel):\n\n def select_atoms(self):\n return selection(self.residue_range('218:', '231:'))\n\n def select_loop_atoms(self):\n return selection(self.residue_range('218:', '231:'))\n",
"step-4": "from modeller import *\nfrom modeller.automodel import *\n\n\nclass MyLoop(dopehr_loopmodel):\n\n def select_atoms(self):\n return selection(self.residue_range('218:', '231:'))\n\n def select_loop_atoms(self):\n return selection(self.residue_range('218:', '231:'))\n",
"step-5": "from modeller import *\nfrom modeller.automodel import * \n\n# This part was within the script loop_modelling_2\n# Here is is in a separate file for loop_modelling_3 so the script can be run in parallel\n\nclass MyLoop(dopehr_loopmodel):\n def select_atoms(self):\n\t # Here only the second loop atoms are allowed to move so we do not mess with the first loop we have previously refined\n return selection(self.residue_range('218:', '231:'))\n def select_loop_atoms(self):\n return selection(self.residue_range('218:', '231:'))\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
import sys
class Bus:
def __init__(self):
self.seats=0
self.dict_seats={}
self.num_passenger = 0
def conctructor(self,seats):
self.seats=seats
for i in range(1,self.seats+1):
self.dict_seats.update({i:"Free"})
return self.dict_seats
def getOn(self, passenger_name=None):
self.num_passenger += 1
if self.num_passenger > self.seats:
sys.exit(f'Sorry dear {passenger_name}. There is no free seat on the bus')
free_list = list(self.dict_seats.values())
free_num_seat = int(free_list.index("Free"))+1
self.dict_seats.update({free_num_seat : passenger_name})
    def getOn_2(self, *names):
        # str(names) would stringify the whole tuple (parentheses, quotes and all);
        # iterate it directly and split any space-separated groups instead
        list_names = []
        for group in names:
            list_names.extend(str(group).split(" "))
        for i in list_names:
            self.num_passenger += 1
            if self.num_passenger > self.seats:
                sys.exit(f'Sorry dear {i}. There is no free seat on the bus')
            free_list = list(self.dict_seats.values())
            free_num_seat = int(free_list.index("Free")) + 1
            self.dict_seats.update({free_num_seat: i})
def getOf(self,passenger_name=None):
self.num_passenger -= 1
close_list = list(self.dict_seats.values())
if passenger_name in close_list:
close_num_seat = int(close_list.index(passenger_name) + 1)
self.dict_seats.update({close_num_seat: "Free"})
else:
print(f'Passenger {passenger_name} is not on the bus')
def __str__(self):
return f'Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}'\
f'\nFree seats - {self.seats-self.num_passenger}'\
f'\nOther details - {self.dict_seats}'
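
A quick usage sketch for the class above (method names are kept exactly as defined, including the "conctructor" spelling):

bus = Bus()
bus.conctructor(4)       # 4 seats, all initially "Free"
bus.getOn('Alice')
bus.getOn('Bob')
bus.getOf('Alice')       # seat 1 is marked "Free" again
print(bus)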
|
normal
|
{
"blob_id": "1396509f65d194eeaefa3841e152b7078abf0032",
"index": 5549,
"step-1": "<mask token>\n\n\nclass Bus:\n <mask token>\n <mask token>\n <mask token>\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Bus:\n\n def __init__(self):\n self.seats = 0\n self.dict_seats = {}\n self.num_passenger = 0\n\n def conctructor(self, seats):\n self.seats = seats\n for i in range(1, self.seats + 1):\n self.dict_seats.update({i: 'Free'})\n return self.dict_seats\n <mask token>\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n\n def __str__(self):\n return f\"\"\"Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}\nFree seats - {self.seats - self.num_passenger}\nOther details - {self.dict_seats}\"\"\"\n",
"step-3": "<mask token>\n\n\nclass Bus:\n\n def __init__(self):\n self.seats = 0\n self.dict_seats = {}\n self.num_passenger = 0\n\n def conctructor(self, seats):\n self.seats = seats\n for i in range(1, self.seats + 1):\n self.dict_seats.update({i: 'Free'})\n return self.dict_seats\n\n def getOn(self, passenger_name=None):\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(\n f'Sorry dear {passenger_name}. There is no free seat on the bus'\n )\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: passenger_name})\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n\n def __str__(self):\n return f\"\"\"Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}\nFree seats - {self.seats - self.num_passenger}\nOther details - {self.dict_seats}\"\"\"\n",
"step-4": "import sys\n\n\nclass Bus:\n\n def __init__(self):\n self.seats = 0\n self.dict_seats = {}\n self.num_passenger = 0\n\n def conctructor(self, seats):\n self.seats = seats\n for i in range(1, self.seats + 1):\n self.dict_seats.update({i: 'Free'})\n return self.dict_seats\n\n def getOn(self, passenger_name=None):\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(\n f'Sorry dear {passenger_name}. There is no free seat on the bus'\n )\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: passenger_name})\n\n def getOn_2(self, *names):\n str_names = str(names)\n str_names.strip('')\n list_names = str_names.split(' ')\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index('Free')) + 1\n self.dict_seats.update({free_num_seat: i})\n\n def getOf(self, passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: 'Free'})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n\n def __str__(self):\n return f\"\"\"Number of seats on the bus - {self.seats}\nNumber of passenger - {self.num_passenger}\nFree seats - {self.seats - self.num_passenger}\nOther details - {self.dict_seats}\"\"\"\n",
"step-5": "import sys\nclass Bus:\n def __init__(self):\n self.seats=0\n self.dict_seats={}\n self.num_passenger = 0\n\n def conctructor(self,seats):\n self.seats=seats\n for i in range(1,self.seats+1):\n self.dict_seats.update({i:\"Free\"})\n return self.dict_seats\n\n def getOn(self, passenger_name=None):\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {passenger_name}. There is no free seat on the bus')\n free_list = list(self.dict_seats.values())\n free_num_seat = int(free_list.index(\"Free\"))+1\n self.dict_seats.update({free_num_seat : passenger_name})\n\n def getOn_2(self,*names):\n str_names=str(names)\n str_names.strip(\"\")\n list_names=str_names.split(\" \")\n for i in list_names:\n self.num_passenger += 1\n if self.num_passenger > self.seats:\n sys.exit(f'Sorry dear {i}. There is no free seat on the bus')\n free_list=list(self.dict_seats.values())\n free_num_seat=int(free_list.index(\"Free\"))+1\n self.dict_seats.update({free_num_seat : i})\n\n\n\n def getOf(self,passenger_name=None):\n self.num_passenger -= 1\n close_list = list(self.dict_seats.values())\n if passenger_name in close_list:\n close_num_seat = int(close_list.index(passenger_name) + 1)\n self.dict_seats.update({close_num_seat: \"Free\"})\n else:\n print(f'Passenger {passenger_name} is not on the bus')\n def __str__(self):\n return f'Number of seats on the bus - {self.seats}\\nNumber of passenger - {self.num_passenger}'\\\n f'\\nFree seats - {self.seats-self.num_passenger}'\\\n f'\\nOther details - {self.dict_seats}'\n\n",
"step-ids": [
3,
6,
7,
8,
9
]
}
|
[
3,
6,
7,
8,
9
] |
'''
Copyright 2014-2015 Reubenur Rahman
All Rights Reserved
@author: [email protected]
'''
import XenAPI
inputs = {'xenserver_master_ip': '15.22.18.17',
'xenserver_password': 'reuben',
'xenserver_user': 'root',
'vm_name': 'SLES11SP2x64',
'target_host': 'xenserver-2'
}
"""
NB: You need a shared storage to perform this action
"""
def main():
try:
print "Aquiring session with the provided xenserver IP..."
session = XenAPI.Session('http://' + inputs['xenserver_master_ip'])
print "Trying to connect to xenserver %s ..." % inputs['xenserver_master_ip']
session.xenapi.login_with_password(inputs['xenserver_user'], inputs['xenserver_password'])
print "Connected to xenserver !"
for vm_ref in session.xenapi.VM.get_by_name_label(inputs['vm_name']):
vm_uuid = session.xenapi.VM.get_uuid(vm_ref)
vm = session.xenapi.VM.get_by_uuid(vm_uuid)
for host_ref in session.xenapi.host.get_by_name_label(inputs['target_host']):
host_uuid = session.xenapi.host.get_uuid(host_ref)
target_host = session.xenapi.host.get_by_uuid(host_uuid)
print "Migrating VM using XenMotion..."
try:
session.xenapi.VM.pool_migrate(vm, target_host, {"live": "true"})
msg = "Successfully migrated VM %s to %s" % (inputs['vm_name'], inputs['target_host'])
print msg
except Exception, e:
print e
msg = "Failed to Migrate VM %s to %s " % (inputs['vm_name'], inputs['target_host'])
print msg
except Exception, e:
print "Caught exception: %s" % str(e)
session.logout()
# Start program
if __name__ == "__main__":
main()
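
The script above is Python 2 syntax (print statements, "except Exception, e"). A hedged Python 3 rendering of the same flow, restricted to the XenAPI calls already used above and passing the opaque refs straight through instead of round-tripping via UUIDs:

import XenAPI

def migrate(master_ip, user, password, vm_name, target_host):
    session = XenAPI.Session('http://' + master_ip)
    session.xenapi.login_with_password(user, password)
    try:
        for vm_ref in session.xenapi.VM.get_by_name_label(vm_name):
            for host_ref in session.xenapi.host.get_by_name_label(target_host):
                print('Migrating VM using XenMotion...')
                session.xenapi.VM.pool_migrate(vm_ref, host_ref, {'live': 'true'})
                print('Successfully migrated VM %s to %s' % (vm_name, target_host))
    except Exception as e:
        print('Caught exception: %s' % e)
    finally:
        session.logout()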
|
normal
|
{
"blob_id": "c173c4673fd716a8b88faf751639d52e9ea4ffab",
"index": 4482,
"step-1": "'''\nCopyright 2014-2015 Reubenur Rahman\nAll Rights Reserved\n@author: [email protected]\n'''\n\nimport XenAPI\n\ninputs = {'xenserver_master_ip': '15.22.18.17',\n 'xenserver_password': 'reuben',\n 'xenserver_user': 'root',\n 'vm_name': 'SLES11SP2x64',\n 'target_host': 'xenserver-2'\n }\n\"\"\"\nNB: You need a shared storage to perform this action\n\"\"\"\n\ndef main():\n try:\n print \"Aquiring session with the provided xenserver IP...\"\n session = XenAPI.Session('http://' + inputs['xenserver_master_ip'])\n print \"Trying to connect to xenserver %s ...\" % inputs['xenserver_master_ip']\n session.xenapi.login_with_password(inputs['xenserver_user'], inputs['xenserver_password'])\n print \"Connected to xenserver !\"\n\n for vm_ref in session.xenapi.VM.get_by_name_label(inputs['vm_name']):\n vm_uuid = session.xenapi.VM.get_uuid(vm_ref)\n\n vm = session.xenapi.VM.get_by_uuid(vm_uuid)\n\n for host_ref in session.xenapi.host.get_by_name_label(inputs['target_host']):\n host_uuid = session.xenapi.host.get_uuid(host_ref)\n\n target_host = session.xenapi.host.get_by_uuid(host_uuid)\n print \"Migrating VM using XenMotion...\"\n try:\n session.xenapi.VM.pool_migrate(vm, target_host, {\"live\": \"true\"})\n msg = \"Successfully migrated VM %s to %s\" % (inputs['vm_name'], inputs['target_host'])\n print msg\n except Exception, e:\n print e\n msg = \"Failed to Migrate VM %s to %s \" % (inputs['vm_name'], inputs['target_host'])\n print msg\n except Exception, e:\n print \"Caught exception: %s\" % str(e)\n\n session.logout()\n# Start program\nif __name__ == \"__main__\":\n main()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding: utf-8 -*-
# @Time : 2019/3/21 20:12
# @Author : for
# @File : test01.py
# @Software: PyCharm
import socket
s=socket.socket()
host=socket.gethostname()
port=3456
s.connect((host,port))
cmd=input(">>>")
s.sendall(cmd.encode())
data=s.recv(1024)
print(data.decode())
s.close()
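
A hedged sketch of a matching server for the client above: same host and port, accepts one connection, echoes the command back, and exits (run it before the client):

import socket

server = socket.socket()
server.bind((socket.gethostname(), 3456))
server.listen(1)
conn, addr = server.accept()
cmd = conn.recv(1024)
conn.sendall(b'received: ' + cmd)   # this is what the client prints
conn.close()
server.close()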
|
normal
|
{
"blob_id": "596814032218c3db746f67e54e4f1863753aea06",
"index": 6299,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ns.connect((host, port))\n<mask token>\ns.sendall(cmd.encode())\n<mask token>\nprint(data.decode())\ns.close()\n",
"step-3": "<mask token>\ns = socket.socket()\nhost = socket.gethostname()\nport = 3456\ns.connect((host, port))\ncmd = input('>>>')\ns.sendall(cmd.encode())\ndata = s.recv(1024)\nprint(data.decode())\ns.close()\n",
"step-4": "import socket\ns = socket.socket()\nhost = socket.gethostname()\nport = 3456\ns.connect((host, port))\ncmd = input('>>>')\ns.sendall(cmd.encode())\ndata = s.recv(1024)\nprint(data.decode())\ns.close()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n# @Time : 2019/3/21 20:12\r\n# @Author : for \r\n# @File : test01.py\r\n# @Software: PyCharm\r\nimport socket\r\n\r\ns=socket.socket()\r\n\r\nhost=socket.gethostname()\r\nport=3456\r\ns.connect((host,port))\r\n\r\ncmd=input(\">>>\")\r\ns.sendall(cmd.encode())\r\ndata=s.recv(1024)\r\nprint(data.decode())\r\n\r\ns.close()\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.shortcuts import render, redirect
from .models import Courses
from django.views.generic import CreateView, ListView, UpdateView, DeleteView
from .forms import CourceCreateForm
from django.urls import reverse_lazy
from django.urls import reverse
class CourceListView(ListView):
model = Courses
template_name = 'cources/cource_list.html'
context_object_name = 'cources'
class CourceCreateView(CreateView):
template_name = 'cources/create_cource.html'
form_class = CourceCreateForm
success_url = reverse_lazy('cources:cource_list')
class CourceUpdateView(UpdateView):
model = Courses
form_class = CourceCreateForm
template_name = 'cources/course_update.html'
success_url = reverse_lazy('cources:cource_list')
def DeleteView(request, pk):
cource = Courses.objects.filter(pk=pk)
cource.delete()
return redirect(reverse('cources:cource_list'))
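
The views above reverse into a 'cources' namespace; a urls.py sketch consistent with those names (the route strings themselves are assumptions):

from django.urls import path
from . import views

app_name = 'cources'
urlpatterns = [
    path('', views.CourceListView.as_view(), name='cource_list'),
    path('create/', views.CourceCreateView.as_view(), name='cource_create'),
    path('<int:pk>/update/', views.CourceUpdateView.as_view(), name='cource_update'),
    path('<int:pk>/delete/', views.DeleteView, name='cource_delete'),
]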
|
normal
|
{
"blob_id": "3340277df91f1421dab8d204eddce65b4604432b",
"index": 369,
"step-1": "<mask token>\n\n\nclass CourceCreateView(CreateView):\n template_name = 'cources/create_cource.html'\n form_class = CourceCreateForm\n success_url = reverse_lazy('cources:cource_list')\n\n\nclass CourceUpdateView(UpdateView):\n model = Courses\n form_class = CourceCreateForm\n template_name = 'cources/course_update.html'\n success_url = reverse_lazy('cources:cource_list')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CourceListView(ListView):\n model = Courses\n template_name = 'cources/cource_list.html'\n context_object_name = 'cources'\n\n\nclass CourceCreateView(CreateView):\n template_name = 'cources/create_cource.html'\n form_class = CourceCreateForm\n success_url = reverse_lazy('cources:cource_list')\n\n\nclass CourceUpdateView(UpdateView):\n model = Courses\n form_class = CourceCreateForm\n template_name = 'cources/course_update.html'\n success_url = reverse_lazy('cources:cource_list')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CourceListView(ListView):\n model = Courses\n template_name = 'cources/cource_list.html'\n context_object_name = 'cources'\n\n\nclass CourceCreateView(CreateView):\n template_name = 'cources/create_cource.html'\n form_class = CourceCreateForm\n success_url = reverse_lazy('cources:cource_list')\n\n\nclass CourceUpdateView(UpdateView):\n model = Courses\n form_class = CourceCreateForm\n template_name = 'cources/course_update.html'\n success_url = reverse_lazy('cources:cource_list')\n\n\ndef DeleteView(request, pk):\n cource = Courses.objects.filter(pk=pk)\n cource.delete()\n return redirect(reverse('cources:cource_list'))\n",
"step-4": "from django.shortcuts import render, redirect\nfrom .models import Courses\nfrom django.views.generic import CreateView, ListView, UpdateView, DeleteView\nfrom .forms import CourceCreateForm\nfrom django.urls import reverse_lazy\nfrom django.urls import reverse\n\n\nclass CourceListView(ListView):\n model = Courses\n template_name = 'cources/cource_list.html'\n context_object_name = 'cources'\n\n\nclass CourceCreateView(CreateView):\n template_name = 'cources/create_cource.html'\n form_class = CourceCreateForm\n success_url = reverse_lazy('cources:cource_list')\n\n\nclass CourceUpdateView(UpdateView):\n model = Courses\n form_class = CourceCreateForm\n template_name = 'cources/course_update.html'\n success_url = reverse_lazy('cources:cource_list')\n\n\ndef DeleteView(request, pk):\n cource = Courses.objects.filter(pk=pk)\n cource.delete()\n return redirect(reverse('cources:cource_list'))\n",
"step-5": null,
"step-ids": [
4,
6,
7,
8
]
}
|
[
4,
6,
7,
8
] |
import numpy as np
import dxchange
import ptychotomo
if __name__ == "__main__":
# read object
u = dxchange.read_tiff('data/init_object.tiff')
u = u+1j*u/2
nz, n, _ = u.shape
# parameters
center = n/2
ntheta = 384
ne = 3*n//2
ngpus = 1
pnz = nz//2
theta = np.linspace(0, 4*np.pi, ntheta).astype('float32')
# simulate data
with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus) as tslv:
data = tslv.fwd_tomo_batch(u)
# adjoint test with data padding
with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center+(ne-n)/2, ngpus) as tslv:
data = ptychotomo.utils.paddata(data, ne)
ua = tslv.adj_tomo_batch(data)
ua = ptychotomo.utils.unpadobject(ua, n)
print(f'norm data = {np.linalg.norm(data)}')
print(f'norm object = {np.linalg.norm(ua)}')
print(
f'<u,R*Ru>=<Ru,Ru>: {np.sum(u*np.conj(ua)):e} ? {np.sum(data*np.conj(data)):e}')
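
The final print checks the adjoint identity <u, R*(Ru)> = <Ru, Ru>. A minimal self-contained illustration of the same test, with a random complex matrix standing in for the tomography operator:

import numpy as np

rng = np.random.default_rng(0)
R = rng.standard_normal((8, 5)) + 1j * rng.standard_normal((8, 5))
u = rng.standard_normal(5) + 1j * rng.standard_normal(5)
Ru = R @ u
RtRu = R.conj().T @ Ru                 # adjoint applied to the data
lhs = np.sum(u * np.conj(RtRu))        # <u, R*Ru>, same convention as the script
rhs = np.sum(Ru * np.conj(Ru))         # <Ru, Ru>
assert np.allclose(lhs, rhs)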
|
normal
|
{
"blob_id": "4ed6f4db4c9c3319d6289ba402f81bbd8accf915",
"index": 9782,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n u = dxchange.read_tiff('data/init_object.tiff')\n u = u + 1.0j * u / 2\n nz, n, _ = u.shape\n center = n / 2\n ntheta = 384\n ne = 3 * n // 2\n ngpus = 1\n pnz = nz // 2\n theta = np.linspace(0, 4 * np.pi, ntheta).astype('float32')\n with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus\n ) as tslv:\n data = tslv.fwd_tomo_batch(u)\n with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center + (ne - n\n ) / 2, ngpus) as tslv:\n data = ptychotomo.utils.paddata(data, ne)\n ua = tslv.adj_tomo_batch(data)\n ua = ptychotomo.utils.unpadobject(ua, n)\n print(f'norm data = {np.linalg.norm(data)}')\n print(f'norm object = {np.linalg.norm(ua)}')\n print(\n f'<u,R*Ru>=<Ru,Ru>: {np.sum(u * np.conj(ua)):e} ? {np.sum(data * np.conj(data)):e}'\n )\n",
"step-3": "import numpy as np\nimport dxchange\nimport ptychotomo\nif __name__ == '__main__':\n u = dxchange.read_tiff('data/init_object.tiff')\n u = u + 1.0j * u / 2\n nz, n, _ = u.shape\n center = n / 2\n ntheta = 384\n ne = 3 * n // 2\n ngpus = 1\n pnz = nz // 2\n theta = np.linspace(0, 4 * np.pi, ntheta).astype('float32')\n with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus\n ) as tslv:\n data = tslv.fwd_tomo_batch(u)\n with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center + (ne - n\n ) / 2, ngpus) as tslv:\n data = ptychotomo.utils.paddata(data, ne)\n ua = tslv.adj_tomo_batch(data)\n ua = ptychotomo.utils.unpadobject(ua, n)\n print(f'norm data = {np.linalg.norm(data)}')\n print(f'norm object = {np.linalg.norm(ua)}')\n print(\n f'<u,R*Ru>=<Ru,Ru>: {np.sum(u * np.conj(ua)):e} ? {np.sum(data * np.conj(data)):e}'\n )\n",
"step-4": "import numpy as np\nimport dxchange\nimport ptychotomo\n\nif __name__ == \"__main__\":\n \n # read object\n u = dxchange.read_tiff('data/init_object.tiff')\n u = u+1j*u/2\n\n nz, n, _ = u.shape\n\n # parameters\n center = n/2\n ntheta = 384\n ne = 3*n//2\n ngpus = 1\n pnz = nz//2\n theta = np.linspace(0, 4*np.pi, ntheta).astype('float32')\n\n # simulate data\n with ptychotomo.SolverTomo(theta, ntheta, nz, n, pnz, center, ngpus) as tslv:\n data = tslv.fwd_tomo_batch(u)\n\n # adjoint test with data padding\n with ptychotomo.SolverTomo(theta, ntheta, nz, ne, pnz, center+(ne-n)/2, ngpus) as tslv:\n data = ptychotomo.utils.paddata(data, ne)\n ua = tslv.adj_tomo_batch(data)\n ua = ptychotomo.utils.unpadobject(ua, n)\n\n print(f'norm data = {np.linalg.norm(data)}')\n print(f'norm object = {np.linalg.norm(ua)}')\n print(\n f'<u,R*Ru>=<Ru,Ru>: {np.sum(u*np.conj(ua)):e} ? {np.sum(data*np.conj(data)):e}')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import json
import requests
BASE_URL = 'http://www.omdbapi.com/'
def rating_msg(rating):
if rating > 80:
return 'You should watch this movie right now!\n'
elif rating < 50:
return 'Avoid this movie at all cost!\n'
else:
return ''
api_key = sys.argv[1]
title = input('Enter the name of a movie: ')
data = {'apikey': api_key, 't': title}
r = requests.get(BASE_URL, data)
if r.status_code == requests.status_codes.codes.ok:
movie_data = json.loads(r.text)
if 'Error' in movie_data:
print(movie_data['Error'])
exit(1)
print(f'\nTitle: {movie_data["Title"]}')
print(f'Year: {movie_data["Year"]}')
print(f'Rating: {movie_data["Rated"]}')
print(f'Running Time: {movie_data["Runtime"]}')
print(f'\nDescription: {movie_data["Plot"]}')
print('\n' + rating_msg(int(movie_data['Metascore'])), end="")
else:
print(r)
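
A small aside: requests.get's second positional argument is params, so the call above sends the API key and title as a query string rather than a request body. The explicit keyword form, plus the r.json() shortcut, is equivalent:

r = requests.get(BASE_URL, params={'apikey': api_key, 't': title})
movie_data = r.json()   # same as json.loads(r.text)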
|
normal
|
{
"blob_id": "7f33effa86fc3a80fce0e5e1ecf97ab4ca80402d",
"index": 1833,
"step-1": "<mask token>\n\n\ndef rating_msg(rating):\n if rating > 80:\n return 'You should watch this movie right now!\\n'\n elif rating < 50:\n return 'Avoid this movie at all cost!\\n'\n else:\n return ''\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef rating_msg(rating):\n if rating > 80:\n return 'You should watch this movie right now!\\n'\n elif rating < 50:\n return 'Avoid this movie at all cost!\\n'\n else:\n return ''\n\n\n<mask token>\nif r.status_code == requests.status_codes.codes.ok:\n movie_data = json.loads(r.text)\n if 'Error' in movie_data:\n print(movie_data['Error'])\n exit(1)\n print(f\"\\nTitle: {movie_data['Title']}\")\n print(f\"Year: {movie_data['Year']}\")\n print(f\"Rating: {movie_data['Rated']}\")\n print(f\"Running Time: {movie_data['Runtime']}\")\n print(f\"\\nDescription: {movie_data['Plot']}\")\n print('\\n' + rating_msg(int(movie_data['Metascore'])), end='')\nelse:\n print(r)\n",
"step-3": "<mask token>\nBASE_URL = 'http://www.omdbapi.com/'\n\n\ndef rating_msg(rating):\n if rating > 80:\n return 'You should watch this movie right now!\\n'\n elif rating < 50:\n return 'Avoid this movie at all cost!\\n'\n else:\n return ''\n\n\napi_key = sys.argv[1]\ntitle = input('Enter the name of a movie: ')\ndata = {'apikey': api_key, 't': title}\nr = requests.get(BASE_URL, data)\nif r.status_code == requests.status_codes.codes.ok:\n movie_data = json.loads(r.text)\n if 'Error' in movie_data:\n print(movie_data['Error'])\n exit(1)\n print(f\"\\nTitle: {movie_data['Title']}\")\n print(f\"Year: {movie_data['Year']}\")\n print(f\"Rating: {movie_data['Rated']}\")\n print(f\"Running Time: {movie_data['Runtime']}\")\n print(f\"\\nDescription: {movie_data['Plot']}\")\n print('\\n' + rating_msg(int(movie_data['Metascore'])), end='')\nelse:\n print(r)\n",
"step-4": "import sys\nimport json\nimport requests\nBASE_URL = 'http://www.omdbapi.com/'\n\n\ndef rating_msg(rating):\n if rating > 80:\n return 'You should watch this movie right now!\\n'\n elif rating < 50:\n return 'Avoid this movie at all cost!\\n'\n else:\n return ''\n\n\napi_key = sys.argv[1]\ntitle = input('Enter the name of a movie: ')\ndata = {'apikey': api_key, 't': title}\nr = requests.get(BASE_URL, data)\nif r.status_code == requests.status_codes.codes.ok:\n movie_data = json.loads(r.text)\n if 'Error' in movie_data:\n print(movie_data['Error'])\n exit(1)\n print(f\"\\nTitle: {movie_data['Title']}\")\n print(f\"Year: {movie_data['Year']}\")\n print(f\"Rating: {movie_data['Rated']}\")\n print(f\"Running Time: {movie_data['Runtime']}\")\n print(f\"\\nDescription: {movie_data['Plot']}\")\n print('\\n' + rating_msg(int(movie_data['Metascore'])), end='')\nelse:\n print(r)\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport sys\nimport json\n\nimport requests\n\n\nBASE_URL = 'http://www.omdbapi.com/'\n\n\ndef rating_msg(rating):\n if rating > 80:\n return 'You should watch this movie right now!\\n'\n elif rating < 50:\n return 'Avoid this movie at all cost!\\n'\n else:\n return ''\n\n\napi_key = sys.argv[1]\n\ntitle = input('Enter the name of a movie: ')\n\ndata = {'apikey': api_key, 't': title}\nr = requests.get(BASE_URL, data)\n\nif r.status_code == requests.status_codes.codes.ok:\n movie_data = json.loads(r.text)\n if 'Error' in movie_data:\n print(movie_data['Error'])\n exit(1)\n\n print(f'\\nTitle: {movie_data[\"Title\"]}')\n print(f'Year: {movie_data[\"Year\"]}')\n print(f'Rating: {movie_data[\"Rated\"]}')\n print(f'Running Time: {movie_data[\"Runtime\"]}')\n print(f'\\nDescription: {movie_data[\"Plot\"]}')\n\n print('\\n' + rating_msg(int(movie_data['Metascore'])), end=\"\")\nelse:\n print(r)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.contrib import admin
from .models import TutorialsReview, TutorialsReviewComment
admin.site.register(TutorialsReview)
admin.site.register(TutorialsReviewComment)
|
normal
|
{
"blob_id": "fea0619263b081f60ed0a4e178ef777a8d5dc988",
"index": 6500,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(TutorialsReview)\nadmin.site.register(TutorialsReviewComment)\n",
"step-3": "from django.contrib import admin\nfrom .models import TutorialsReview, TutorialsReviewComment\nadmin.site.register(TutorialsReview)\nadmin.site.register(TutorialsReviewComment)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import onmt
import torch.nn as nn
import torch.nn.functional as F
import torch
import torch.cuda
from torch.autograd import Variable
class CopyGenerator(nn.Module):
"""
Generator module that additionally considers copying
words directly from the source.
"""
def __init__(self, opt, src_dict, tgt_dict):
super(CopyGenerator, self).__init__()
self.linear = nn.Linear(opt.rnn_size, tgt_dict.size())
self.linear_copy = nn.Linear(opt.rnn_size, 1)
self.src_dict = src_dict
self.tgt_dict = tgt_dict
def forward(self, hidden, attn, verbose=False):
"""
        Computes p(w) = p(z=1) * p_{copy}(w|z=1) + p(z=0) * p_{softmax}(w|z=0)
Args:
hidden (FloatTensor): (tgt_len*batch) x hidden
attn (FloatTensor): (tgt_len*batch) x src_len
Returns:
prob (FloatTensor): (tgt_len*batch) x vocab
attn (FloatTensor): (tgt_len*batch) x src_len
"""
# Original probabilities.
logits = self.linear(hidden)
logits[:, onmt.Constants.UNK] = -float('inf')
logits[:, onmt.Constants.PAD] = -float('inf')
prob = F.softmax(logits)
# Probability of copying p(z=1) batch
copy = F.sigmoid(self.linear_copy(hidden))
        # Probability of not copying: p_{word}(w) * (1 - p(z))
out_prob = torch.mul(prob, 1 - copy.expand_as(prob))
mul_attn = torch.mul(attn, copy.expand_as(attn))
return out_prob, mul_attn
def _debug_copy(self, src, copy, prob, out_prob, attn, mul_attn):
v, mid = prob[0].data.max(0)
print("Initial:", self.tgt_dict.getLabel(mid[0], "FAIL"), v[0])
print("COPY %3f" % copy.data[0][0])
_, ids = attn[0].cpu().data.sort(0, descending=True)
for j in ids[:10].tolist():
src_idx = src[0, j].data[0]
print("\t%s\t\t%d\t%3f\t%3f" % (
self.src_dict.getLabel(src_idx),
j,
attn[0, j].data[0],
mul_attn[0, j].data[0]))
def CopyCriterion(probs, attn, targ, align, eps=1e-12):
copies = attn.mul(Variable(align)).sum(-1).add(eps)
# Can't use UNK, must copy.
out = torch.log(probs.gather(1, targ.view(-1, 1)).view(-1) + copies + eps)
out = out.mul(targ.ne(onmt.Constants.PAD).float())
return -out.sum()
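

# --- Illustrative usage sketch (added; not part of the original OpenNMT
# file). The opt object, dictionaries, and tensor sizes are stand-in
# assumptions chosen only to show the expected shapes.
#
# class _Opt:
#     rnn_size = 8
# generator = CopyGenerator(_Opt(), src_dict, tgt_dict)
# hidden = torch.randn(tgt_len * batch, _Opt.rnn_size)   # decoder states
# attn = F.softmax(torch.randn(tgt_len * batch, src_len), dim=-1)
# out_prob, mul_attn = generator(hidden, attn)
# # out_prob: (tgt_len*batch) x vocab   -- generation probs, scaled by p(z=0)
# # mul_attn: (tgt_len*batch) x src_len -- copy attention, scaled by p(z=1)
# # align is a 0/1 tensor marking which source positions each target copies:
# # loss = CopyCriterion(out_prob, mul_attn, targ, align)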
|
normal
|
{
"blob_id": "704b3c57ca080862bed7a4caa65d1c8d5a32fa0b",
"index": 168,
"step-1": "<mask token>\n\n\nclass CopyGenerator(nn.Module):\n <mask token>\n\n def __init__(self, opt, src_dict, tgt_dict):\n super(CopyGenerator, self).__init__()\n self.linear = nn.Linear(opt.rnn_size, tgt_dict.size())\n self.linear_copy = nn.Linear(opt.rnn_size, 1)\n self.src_dict = src_dict\n self.tgt_dict = tgt_dict\n\n def forward(self, hidden, attn, verbose=False):\n \"\"\"\n Computes p(w) = p(z=1) p_{copy}(w|z=0) + p(z=0) * p_{softmax}(w|z=0)\n\n\n Args:\n hidden (FloatTensor): (tgt_len*batch) x hidden\n attn (FloatTensor): (tgt_len*batch) x src_len\n\n Returns:\n prob (FloatTensor): (tgt_len*batch) x vocab\n attn (FloatTensor): (tgt_len*batch) x src_len\n \"\"\"\n logits = self.linear(hidden)\n logits[:, onmt.Constants.UNK] = -float('inf')\n logits[:, onmt.Constants.PAD] = -float('inf')\n prob = F.softmax(logits)\n copy = F.sigmoid(self.linear_copy(hidden))\n out_prob = torch.mul(prob, 1 - copy.expand_as(prob))\n mul_attn = torch.mul(attn, copy.expand_as(attn))\n return out_prob, mul_attn\n\n def _debug_copy(self, src, copy, prob, out_prob, attn, mul_attn):\n v, mid = prob[0].data.max(0)\n print('Initial:', self.tgt_dict.getLabel(mid[0], 'FAIL'), v[0])\n print('COPY %3f' % copy.data[0][0])\n _, ids = attn[0].cpu().data.sort(0, descending=True)\n for j in ids[:10].tolist():\n src_idx = src[0, j].data[0]\n print('\\t%s\\t\\t%d\\t%3f\\t%3f' % (self.src_dict.getLabel(src_idx),\n j, attn[0, j].data[0], mul_attn[0, j].data[0]))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CopyGenerator(nn.Module):\n \"\"\"\n Generator module that additionally considers copying\n words directly from the source.\n \"\"\"\n\n def __init__(self, opt, src_dict, tgt_dict):\n super(CopyGenerator, self).__init__()\n self.linear = nn.Linear(opt.rnn_size, tgt_dict.size())\n self.linear_copy = nn.Linear(opt.rnn_size, 1)\n self.src_dict = src_dict\n self.tgt_dict = tgt_dict\n\n def forward(self, hidden, attn, verbose=False):\n \"\"\"\n Computes p(w) = p(z=1) p_{copy}(w|z=0) + p(z=0) * p_{softmax}(w|z=0)\n\n\n Args:\n hidden (FloatTensor): (tgt_len*batch) x hidden\n attn (FloatTensor): (tgt_len*batch) x src_len\n\n Returns:\n prob (FloatTensor): (tgt_len*batch) x vocab\n attn (FloatTensor): (tgt_len*batch) x src_len\n \"\"\"\n logits = self.linear(hidden)\n logits[:, onmt.Constants.UNK] = -float('inf')\n logits[:, onmt.Constants.PAD] = -float('inf')\n prob = F.softmax(logits)\n copy = F.sigmoid(self.linear_copy(hidden))\n out_prob = torch.mul(prob, 1 - copy.expand_as(prob))\n mul_attn = torch.mul(attn, copy.expand_as(attn))\n return out_prob, mul_attn\n\n def _debug_copy(self, src, copy, prob, out_prob, attn, mul_attn):\n v, mid = prob[0].data.max(0)\n print('Initial:', self.tgt_dict.getLabel(mid[0], 'FAIL'), v[0])\n print('COPY %3f' % copy.data[0][0])\n _, ids = attn[0].cpu().data.sort(0, descending=True)\n for j in ids[:10].tolist():\n src_idx = src[0, j].data[0]\n print('\\t%s\\t\\t%d\\t%3f\\t%3f' % (self.src_dict.getLabel(src_idx),\n j, attn[0, j].data[0], mul_attn[0, j].data[0]))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass CopyGenerator(nn.Module):\n \"\"\"\n Generator module that additionally considers copying\n words directly from the source.\n \"\"\"\n\n def __init__(self, opt, src_dict, tgt_dict):\n super(CopyGenerator, self).__init__()\n self.linear = nn.Linear(opt.rnn_size, tgt_dict.size())\n self.linear_copy = nn.Linear(opt.rnn_size, 1)\n self.src_dict = src_dict\n self.tgt_dict = tgt_dict\n\n def forward(self, hidden, attn, verbose=False):\n \"\"\"\n Computes p(w) = p(z=1) p_{copy}(w|z=0) + p(z=0) * p_{softmax}(w|z=0)\n\n\n Args:\n hidden (FloatTensor): (tgt_len*batch) x hidden\n attn (FloatTensor): (tgt_len*batch) x src_len\n\n Returns:\n prob (FloatTensor): (tgt_len*batch) x vocab\n attn (FloatTensor): (tgt_len*batch) x src_len\n \"\"\"\n logits = self.linear(hidden)\n logits[:, onmt.Constants.UNK] = -float('inf')\n logits[:, onmt.Constants.PAD] = -float('inf')\n prob = F.softmax(logits)\n copy = F.sigmoid(self.linear_copy(hidden))\n out_prob = torch.mul(prob, 1 - copy.expand_as(prob))\n mul_attn = torch.mul(attn, copy.expand_as(attn))\n return out_prob, mul_attn\n\n def _debug_copy(self, src, copy, prob, out_prob, attn, mul_attn):\n v, mid = prob[0].data.max(0)\n print('Initial:', self.tgt_dict.getLabel(mid[0], 'FAIL'), v[0])\n print('COPY %3f' % copy.data[0][0])\n _, ids = attn[0].cpu().data.sort(0, descending=True)\n for j in ids[:10].tolist():\n src_idx = src[0, j].data[0]\n print('\\t%s\\t\\t%d\\t%3f\\t%3f' % (self.src_dict.getLabel(src_idx),\n j, attn[0, j].data[0], mul_attn[0, j].data[0]))\n\n\ndef CopyCriterion(probs, attn, targ, align, eps=1e-12):\n copies = attn.mul(Variable(align)).sum(-1).add(eps)\n out = torch.log(probs.gather(1, targ.view(-1, 1)).view(-1) + copies + eps)\n out = out.mul(targ.ne(onmt.Constants.PAD).float())\n return -out.sum()\n",
"step-4": "import onmt\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch\nimport torch.cuda\nfrom torch.autograd import Variable\n\n\nclass CopyGenerator(nn.Module):\n \"\"\"\n Generator module that additionally considers copying\n words directly from the source.\n \"\"\"\n\n def __init__(self, opt, src_dict, tgt_dict):\n super(CopyGenerator, self).__init__()\n self.linear = nn.Linear(opt.rnn_size, tgt_dict.size())\n self.linear_copy = nn.Linear(opt.rnn_size, 1)\n self.src_dict = src_dict\n self.tgt_dict = tgt_dict\n\n def forward(self, hidden, attn, verbose=False):\n \"\"\"\n Computes p(w) = p(z=1) p_{copy}(w|z=0) + p(z=0) * p_{softmax}(w|z=0)\n\n\n Args:\n hidden (FloatTensor): (tgt_len*batch) x hidden\n attn (FloatTensor): (tgt_len*batch) x src_len\n\n Returns:\n prob (FloatTensor): (tgt_len*batch) x vocab\n attn (FloatTensor): (tgt_len*batch) x src_len\n \"\"\"\n logits = self.linear(hidden)\n logits[:, onmt.Constants.UNK] = -float('inf')\n logits[:, onmt.Constants.PAD] = -float('inf')\n prob = F.softmax(logits)\n copy = F.sigmoid(self.linear_copy(hidden))\n out_prob = torch.mul(prob, 1 - copy.expand_as(prob))\n mul_attn = torch.mul(attn, copy.expand_as(attn))\n return out_prob, mul_attn\n\n def _debug_copy(self, src, copy, prob, out_prob, attn, mul_attn):\n v, mid = prob[0].data.max(0)\n print('Initial:', self.tgt_dict.getLabel(mid[0], 'FAIL'), v[0])\n print('COPY %3f' % copy.data[0][0])\n _, ids = attn[0].cpu().data.sort(0, descending=True)\n for j in ids[:10].tolist():\n src_idx = src[0, j].data[0]\n print('\\t%s\\t\\t%d\\t%3f\\t%3f' % (self.src_dict.getLabel(src_idx),\n j, attn[0, j].data[0], mul_attn[0, j].data[0]))\n\n\ndef CopyCriterion(probs, attn, targ, align, eps=1e-12):\n copies = attn.mul(Variable(align)).sum(-1).add(eps)\n out = torch.log(probs.gather(1, targ.view(-1, 1)).view(-1) + copies + eps)\n out = out.mul(targ.ne(onmt.Constants.PAD).float())\n return -out.sum()\n",
"step-5": "import onmt\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch\nimport torch.cuda\nfrom torch.autograd import Variable\n\n\nclass CopyGenerator(nn.Module):\n \"\"\"\n Generator module that additionally considers copying\n words directly from the source.\n \"\"\"\n\n def __init__(self, opt, src_dict, tgt_dict):\n super(CopyGenerator, self).__init__()\n self.linear = nn.Linear(opt.rnn_size, tgt_dict.size())\n self.linear_copy = nn.Linear(opt.rnn_size, 1)\n self.src_dict = src_dict\n self.tgt_dict = tgt_dict\n\n def forward(self, hidden, attn, verbose=False):\n \"\"\"\n Computes p(w) = p(z=1) p_{copy}(w|z=0) + p(z=0) * p_{softmax}(w|z=0)\n\n\n Args:\n hidden (FloatTensor): (tgt_len*batch) x hidden\n attn (FloatTensor): (tgt_len*batch) x src_len\n\n Returns:\n prob (FloatTensor): (tgt_len*batch) x vocab\n attn (FloatTensor): (tgt_len*batch) x src_len\n \"\"\"\n # Original probabilities.\n logits = self.linear(hidden)\n logits[:, onmt.Constants.UNK] = -float('inf')\n logits[:, onmt.Constants.PAD] = -float('inf')\n prob = F.softmax(logits)\n\n # Probability of copying p(z=1) batch\n copy = F.sigmoid(self.linear_copy(hidden))\n\n # Probibility of not copying: p_{word}(w) * (1 - p(z))\n out_prob = torch.mul(prob, 1 - copy.expand_as(prob))\n mul_attn = torch.mul(attn, copy.expand_as(attn))\n return out_prob, mul_attn\n\n def _debug_copy(self, src, copy, prob, out_prob, attn, mul_attn):\n v, mid = prob[0].data.max(0)\n print(\"Initial:\", self.tgt_dict.getLabel(mid[0], \"FAIL\"), v[0])\n print(\"COPY %3f\" % copy.data[0][0])\n _, ids = attn[0].cpu().data.sort(0, descending=True)\n for j in ids[:10].tolist():\n src_idx = src[0, j].data[0]\n print(\"\\t%s\\t\\t%d\\t%3f\\t%3f\" % (\n self.src_dict.getLabel(src_idx),\n j,\n attn[0, j].data[0],\n mul_attn[0, j].data[0]))\n\n\ndef CopyCriterion(probs, attn, targ, align, eps=1e-12):\n copies = attn.mul(Variable(align)).sum(-1).add(eps)\n # Can't use UNK, must copy.\n out = torch.log(probs.gather(1, targ.view(-1, 1)).view(-1) + copies + eps)\n out = out.mul(targ.ne(onmt.Constants.PAD).float())\n return -out.sum()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from unittest import TestCase
from pydsf.exceptions import DSFServiceError
from pydsf.service.response import parse_response
from pydsf.service.translations import translate_input_fields, translate_output_fields
class MockMessage(object):
SUMMARY = "Error summary"
def __init__(self, error_code):
self.CODE = error_code
class MockError(object):
feilmelding = "Error message here."
class MockErrorDetail(object):
feil = MockError()
class MockResponseOK(object):
def __init__(self, error_code=None, result=True):
if result:
self.RESULT = "Results"
else:
self.MESSAGE = MockMessage(error_code)
class MockResponseError(object):
faultstring = "Fault string"
detail = MockErrorDetail()
def __init__(self):
pass
class MockResponseParsed(object):
HOV = list()
def __init__(self):
self.HOV.append(("FODT", "010107"))
self.HOV.append(("PERS", "50160"))
self.HOV.append(("NAVN-F", "TOMAS"))
self.HOV.append(("NAVN-M", ""))
self.HOV.append(("UKJENTFELT", "something"))
class PersonTests(TestCase):
def test_bad_search_fields(self):
pass
def test_error_response(self):
pass
    def test_empty_response(self):
pass
def test_result(self):
pass
class ResponseTests(TestCase):
def test_has_result(self):
result = MockResponseOK()
parsed = parse_response((200, result))
self.assertEqual(parsed, "Results")
def test_no_result(self):
result = MockResponseOK(error_code="1", result=False)
parsed = parse_response((200, result))
self.assertIsNone(parsed)
@staticmethod
def test_result_with_error():
result = MockResponseOK(error_code="2", result=False)
with pytest.raises(DSFServiceError):
parse_response((200, result))
@staticmethod
    def test_unknown_error():
result = MockResponseError()
with pytest.raises(DSFServiceError):
parse_response((500, result))
class TranslationTests(TestCase):
def test_output_translation(self):
response = MockResponseParsed()
translated = translate_output_fields(response)
self.assertIsInstance(translated, dict)
self.assertTrue("date_of_birth" in translated)
self.assertTrue("person_number" in translated)
self.assertTrue("first_name" in translated)
self.assertTrue("middle_name" in translated)
# Verify capitalisation and None
self.assertEqual(translated["first_name"], "Tomas")
self.assertEqual(translated["UKJENTFELT"], "Something")
# Verify that empty strings are translated to None
self.assertIsNone(translated["middle_name"])
def test_input_translation(self):
valid_input = {
"end_user": "unicornis-test",
"first_name": "tomas",
"last_name": "topstad"
}
invalid_input_invalid_field = {
"end_user": "unicornis_test",
"invalidfield": "somevalue"
}
with pytest.raises(ValueError):
translate_input_fields(**invalid_input_invalid_field)
translated = translate_input_fields(**valid_input)
self.assertTrue("saksref" in translated)
self.assertTrue("fornavn" in translated)
self.assertTrue("etternavn" in translated)
|
normal
|
{
"blob_id": "bbff797fab4ac7dc7e6adb81c0eeda561f8ee147",
"index": 9603,
"step-1": "<mask token>\n\n\nclass MockResponseError(object):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass MockResponseParsed(object):\n HOV = list()\n\n def __init__(self):\n self.HOV.append(('FODT', '010107'))\n self.HOV.append(('PERS', '50160'))\n self.HOV.append(('NAVN-F', 'TOMAS'))\n self.HOV.append(('NAVN-M', ''))\n self.HOV.append(('UKJENTFELT', 'something'))\n\n\nclass PersonTests(TestCase):\n\n def test_bad_search_fields(self):\n pass\n\n def test_error_response(self):\n pass\n\n def tets_empty_response(self):\n pass\n\n def test_result(self):\n pass\n\n\nclass ResponseTests(TestCase):\n\n def test_has_result(self):\n result = MockResponseOK()\n parsed = parse_response((200, result))\n self.assertEqual(parsed, 'Results')\n\n def test_no_result(self):\n result = MockResponseOK(error_code='1', result=False)\n parsed = parse_response((200, result))\n self.assertIsNone(parsed)\n\n @staticmethod\n def test_result_with_error():\n result = MockResponseOK(error_code='2', result=False)\n with pytest.raises(DSFServiceError):\n parse_response((200, result))\n\n @staticmethod\n def test_uknown_error():\n result = MockResponseError()\n with pytest.raises(DSFServiceError):\n parse_response((500, result))\n\n\nclass TranslationTests(TestCase):\n\n def test_output_translation(self):\n response = MockResponseParsed()\n translated = translate_output_fields(response)\n self.assertIsInstance(translated, dict)\n self.assertTrue('date_of_birth' in translated)\n self.assertTrue('person_number' in translated)\n self.assertTrue('first_name' in translated)\n self.assertTrue('middle_name' in translated)\n self.assertEqual(translated['first_name'], 'Tomas')\n self.assertEqual(translated['UKJENTFELT'], 'Something')\n self.assertIsNone(translated['middle_name'])\n\n def test_input_translation(self):\n valid_input = {'end_user': 'unicornis-test', 'first_name': 'tomas',\n 'last_name': 'topstad'}\n invalid_input_invalid_field = {'end_user': 'unicornis_test',\n 'invalidfield': 'somevalue'}\n with pytest.raises(ValueError):\n translate_input_fields(**invalid_input_invalid_field)\n translated = translate_input_fields(**valid_input)\n self.assertTrue('saksref' in translated)\n self.assertTrue('fornavn' in translated)\n self.assertTrue('etternavn' in translated)\n",
"step-2": "<mask token>\n\n\nclass MockError(object):\n <mask token>\n\n\nclass MockErrorDetail(object):\n feil = MockError()\n\n\nclass MockResponseOK(object):\n\n def __init__(self, error_code=None, result=True):\n if result:\n self.RESULT = 'Results'\n else:\n self.MESSAGE = MockMessage(error_code)\n\n\nclass MockResponseError(object):\n faultstring = 'Fault string'\n detail = MockErrorDetail()\n\n def __init__(self):\n pass\n\n\nclass MockResponseParsed(object):\n HOV = list()\n\n def __init__(self):\n self.HOV.append(('FODT', '010107'))\n self.HOV.append(('PERS', '50160'))\n self.HOV.append(('NAVN-F', 'TOMAS'))\n self.HOV.append(('NAVN-M', ''))\n self.HOV.append(('UKJENTFELT', 'something'))\n\n\nclass PersonTests(TestCase):\n\n def test_bad_search_fields(self):\n pass\n\n def test_error_response(self):\n pass\n\n def tets_empty_response(self):\n pass\n\n def test_result(self):\n pass\n\n\nclass ResponseTests(TestCase):\n\n def test_has_result(self):\n result = MockResponseOK()\n parsed = parse_response((200, result))\n self.assertEqual(parsed, 'Results')\n\n def test_no_result(self):\n result = MockResponseOK(error_code='1', result=False)\n parsed = parse_response((200, result))\n self.assertIsNone(parsed)\n\n @staticmethod\n def test_result_with_error():\n result = MockResponseOK(error_code='2', result=False)\n with pytest.raises(DSFServiceError):\n parse_response((200, result))\n\n @staticmethod\n def test_uknown_error():\n result = MockResponseError()\n with pytest.raises(DSFServiceError):\n parse_response((500, result))\n\n\nclass TranslationTests(TestCase):\n\n def test_output_translation(self):\n response = MockResponseParsed()\n translated = translate_output_fields(response)\n self.assertIsInstance(translated, dict)\n self.assertTrue('date_of_birth' in translated)\n self.assertTrue('person_number' in translated)\n self.assertTrue('first_name' in translated)\n self.assertTrue('middle_name' in translated)\n self.assertEqual(translated['first_name'], 'Tomas')\n self.assertEqual(translated['UKJENTFELT'], 'Something')\n self.assertIsNone(translated['middle_name'])\n\n def test_input_translation(self):\n valid_input = {'end_user': 'unicornis-test', 'first_name': 'tomas',\n 'last_name': 'topstad'}\n invalid_input_invalid_field = {'end_user': 'unicornis_test',\n 'invalidfield': 'somevalue'}\n with pytest.raises(ValueError):\n translate_input_fields(**invalid_input_invalid_field)\n translated = translate_input_fields(**valid_input)\n self.assertTrue('saksref' in translated)\n self.assertTrue('fornavn' in translated)\n self.assertTrue('etternavn' in translated)\n",
"step-3": "<mask token>\n\n\nclass MockMessage(object):\n <mask token>\n <mask token>\n\n\nclass MockError(object):\n feilmelding = 'Error message here.'\n\n\nclass MockErrorDetail(object):\n feil = MockError()\n\n\nclass MockResponseOK(object):\n\n def __init__(self, error_code=None, result=True):\n if result:\n self.RESULT = 'Results'\n else:\n self.MESSAGE = MockMessage(error_code)\n\n\nclass MockResponseError(object):\n faultstring = 'Fault string'\n detail = MockErrorDetail()\n\n def __init__(self):\n pass\n\n\nclass MockResponseParsed(object):\n HOV = list()\n\n def __init__(self):\n self.HOV.append(('FODT', '010107'))\n self.HOV.append(('PERS', '50160'))\n self.HOV.append(('NAVN-F', 'TOMAS'))\n self.HOV.append(('NAVN-M', ''))\n self.HOV.append(('UKJENTFELT', 'something'))\n\n\nclass PersonTests(TestCase):\n\n def test_bad_search_fields(self):\n pass\n\n def test_error_response(self):\n pass\n\n def tets_empty_response(self):\n pass\n\n def test_result(self):\n pass\n\n\nclass ResponseTests(TestCase):\n\n def test_has_result(self):\n result = MockResponseOK()\n parsed = parse_response((200, result))\n self.assertEqual(parsed, 'Results')\n\n def test_no_result(self):\n result = MockResponseOK(error_code='1', result=False)\n parsed = parse_response((200, result))\n self.assertIsNone(parsed)\n\n @staticmethod\n def test_result_with_error():\n result = MockResponseOK(error_code='2', result=False)\n with pytest.raises(DSFServiceError):\n parse_response((200, result))\n\n @staticmethod\n def test_uknown_error():\n result = MockResponseError()\n with pytest.raises(DSFServiceError):\n parse_response((500, result))\n\n\nclass TranslationTests(TestCase):\n\n def test_output_translation(self):\n response = MockResponseParsed()\n translated = translate_output_fields(response)\n self.assertIsInstance(translated, dict)\n self.assertTrue('date_of_birth' in translated)\n self.assertTrue('person_number' in translated)\n self.assertTrue('first_name' in translated)\n self.assertTrue('middle_name' in translated)\n self.assertEqual(translated['first_name'], 'Tomas')\n self.assertEqual(translated['UKJENTFELT'], 'Something')\n self.assertIsNone(translated['middle_name'])\n\n def test_input_translation(self):\n valid_input = {'end_user': 'unicornis-test', 'first_name': 'tomas',\n 'last_name': 'topstad'}\n invalid_input_invalid_field = {'end_user': 'unicornis_test',\n 'invalidfield': 'somevalue'}\n with pytest.raises(ValueError):\n translate_input_fields(**invalid_input_invalid_field)\n translated = translate_input_fields(**valid_input)\n self.assertTrue('saksref' in translated)\n self.assertTrue('fornavn' in translated)\n self.assertTrue('etternavn' in translated)\n",
"step-4": "from __future__ import unicode_literals\nimport pytest\nfrom unittest import TestCase\nfrom pydsf.exceptions import DSFServiceError\nfrom pydsf.service.response import parse_response\nfrom pydsf.service.translations import translate_input_fields, translate_output_fields\n\n\nclass MockMessage(object):\n SUMMARY = 'Error summary'\n\n def __init__(self, error_code):\n self.CODE = error_code\n\n\nclass MockError(object):\n feilmelding = 'Error message here.'\n\n\nclass MockErrorDetail(object):\n feil = MockError()\n\n\nclass MockResponseOK(object):\n\n def __init__(self, error_code=None, result=True):\n if result:\n self.RESULT = 'Results'\n else:\n self.MESSAGE = MockMessage(error_code)\n\n\nclass MockResponseError(object):\n faultstring = 'Fault string'\n detail = MockErrorDetail()\n\n def __init__(self):\n pass\n\n\nclass MockResponseParsed(object):\n HOV = list()\n\n def __init__(self):\n self.HOV.append(('FODT', '010107'))\n self.HOV.append(('PERS', '50160'))\n self.HOV.append(('NAVN-F', 'TOMAS'))\n self.HOV.append(('NAVN-M', ''))\n self.HOV.append(('UKJENTFELT', 'something'))\n\n\nclass PersonTests(TestCase):\n\n def test_bad_search_fields(self):\n pass\n\n def test_error_response(self):\n pass\n\n def tets_empty_response(self):\n pass\n\n def test_result(self):\n pass\n\n\nclass ResponseTests(TestCase):\n\n def test_has_result(self):\n result = MockResponseOK()\n parsed = parse_response((200, result))\n self.assertEqual(parsed, 'Results')\n\n def test_no_result(self):\n result = MockResponseOK(error_code='1', result=False)\n parsed = parse_response((200, result))\n self.assertIsNone(parsed)\n\n @staticmethod\n def test_result_with_error():\n result = MockResponseOK(error_code='2', result=False)\n with pytest.raises(DSFServiceError):\n parse_response((200, result))\n\n @staticmethod\n def test_uknown_error():\n result = MockResponseError()\n with pytest.raises(DSFServiceError):\n parse_response((500, result))\n\n\nclass TranslationTests(TestCase):\n\n def test_output_translation(self):\n response = MockResponseParsed()\n translated = translate_output_fields(response)\n self.assertIsInstance(translated, dict)\n self.assertTrue('date_of_birth' in translated)\n self.assertTrue('person_number' in translated)\n self.assertTrue('first_name' in translated)\n self.assertTrue('middle_name' in translated)\n self.assertEqual(translated['first_name'], 'Tomas')\n self.assertEqual(translated['UKJENTFELT'], 'Something')\n self.assertIsNone(translated['middle_name'])\n\n def test_input_translation(self):\n valid_input = {'end_user': 'unicornis-test', 'first_name': 'tomas',\n 'last_name': 'topstad'}\n invalid_input_invalid_field = {'end_user': 'unicornis_test',\n 'invalidfield': 'somevalue'}\n with pytest.raises(ValueError):\n translate_input_fields(**invalid_input_invalid_field)\n translated = translate_input_fields(**valid_input)\n self.assertTrue('saksref' in translated)\n self.assertTrue('fornavn' in translated)\n self.assertTrue('etternavn' in translated)\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nimport pytest\nfrom unittest import TestCase\n\nfrom pydsf.exceptions import DSFServiceError\nfrom pydsf.service.response import parse_response\nfrom pydsf.service.translations import translate_input_fields, translate_output_fields\n\n\nclass MockMessage(object):\n SUMMARY = \"Error summary\"\n\n def __init__(self, error_code):\n self.CODE = error_code\n\n\nclass MockError(object):\n feilmelding = \"Error message here.\"\n\n\nclass MockErrorDetail(object):\n feil = MockError()\n\n\nclass MockResponseOK(object):\n\n def __init__(self, error_code=None, result=True):\n if result:\n self.RESULT = \"Results\"\n else:\n self.MESSAGE = MockMessage(error_code)\n\n\nclass MockResponseError(object):\n\n faultstring = \"Fault string\"\n detail = MockErrorDetail()\n\n def __init__(self):\n pass\n\n\nclass MockResponseParsed(object):\n HOV = list()\n\n def __init__(self):\n self.HOV.append((\"FODT\", \"010107\"))\n self.HOV.append((\"PERS\", \"50160\"))\n self.HOV.append((\"NAVN-F\", \"TOMAS\"))\n self.HOV.append((\"NAVN-M\", \"\"))\n self.HOV.append((\"UKJENTFELT\", \"something\"))\n\n\nclass PersonTests(TestCase):\n\n def test_bad_search_fields(self):\n pass\n\n def test_error_response(self):\n pass\n\n def tets_empty_response(self):\n pass\n\n def test_result(self):\n pass\n\n\nclass ResponseTests(TestCase):\n\n def test_has_result(self):\n result = MockResponseOK()\n parsed = parse_response((200, result))\n\n self.assertEqual(parsed, \"Results\")\n\n def test_no_result(self):\n result = MockResponseOK(error_code=\"1\", result=False)\n\n parsed = parse_response((200, result))\n\n self.assertIsNone(parsed)\n\n @staticmethod\n def test_result_with_error():\n result = MockResponseOK(error_code=\"2\", result=False)\n\n with pytest.raises(DSFServiceError):\n parse_response((200, result))\n\n @staticmethod\n def test_uknown_error():\n result = MockResponseError()\n\n with pytest.raises(DSFServiceError):\n parse_response((500, result))\n\n\nclass TranslationTests(TestCase):\n\n def test_output_translation(self):\n response = MockResponseParsed()\n translated = translate_output_fields(response)\n\n self.assertIsInstance(translated, dict)\n self.assertTrue(\"date_of_birth\" in translated)\n self.assertTrue(\"person_number\" in translated)\n self.assertTrue(\"first_name\" in translated)\n self.assertTrue(\"middle_name\" in translated)\n\n # Verify capitalisation and None\n self.assertEqual(translated[\"first_name\"], \"Tomas\")\n self.assertEqual(translated[\"UKJENTFELT\"], \"Something\")\n\n # Verify that empty strings are translated to None\n self.assertIsNone(translated[\"middle_name\"])\n\n def test_input_translation(self):\n valid_input = {\n \"end_user\": \"unicornis-test\",\n \"first_name\": \"tomas\",\n \"last_name\": \"topstad\"\n }\n invalid_input_invalid_field = {\n \"end_user\": \"unicornis_test\",\n \"invalidfield\": \"somevalue\"\n }\n\n with pytest.raises(ValueError):\n translate_input_fields(**invalid_input_invalid_field)\n\n translated = translate_input_fields(**valid_input)\n self.assertTrue(\"saksref\" in translated)\n self.assertTrue(\"fornavn\" in translated)\n self.assertTrue(\"etternavn\" in translated)\n",
"step-ids": [
17,
24,
26,
29,
30
]
}
|
[
17,
24,
26,
29,
30
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun May 17 17:24:39 2020
@author: code
"""
import sys
import keras
import cv2
import numpy
import matplotlib
import skimage
print('Python: {}'.format(sys.version))
print('Numpy: {}'.format(numpy.__version__))
print('Keras: {}'.format(keras.__version__))
print('Matplotlib: {}'.format(matplotlib.__version__))
print('OpenCV: {}'.format(cv2.__version__))
print('Skimage: {}'.format(skimage.__version__))
#import necessary packages
from keras.models import Sequential
from keras.layers import Conv2D, Input
from keras.optimizers import SGD, Adam
from skimage.measure import compare_ssim as ssim
from matplotlib import pyplot as plt
import cv2
import numpy as np
import math
import os
#define a function for peak signal-to-noise ratio (PSNR)
def psnr(target, ref):
#assume RGB/BGR image
target_data = target.astype(float)
ref_data = ref.astype(float)
diff = ref_data - target_data
diff = diff.flatten('C')
rmse = math.sqrt(np.mean(diff ** 2))
return 20*math.log10(255. / rmse)
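
# Quick sanity check on the PSNR formula above (hypothetical rmse values,
# not outputs of this script):
#   rmse = 255  -> 20*log10(255/255) = 0 dB   (maximally different images)
#   rmse = 2.55 -> 20*log10(100)     = 40 dB  (nearly identical images)
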
#define function for mean Squared error(MSE)
def mse(target, ref):
    #mse is the mean of the squared differences between the two images
    err = np.sum((target.astype('float') - ref.astype('float')) ** 2)
err /= float(target.shape[0] *target.shape[1])
return err
#define function that combines all three image quality metrics
def compare_images(target, ref):
scores = []
scores.append(psnr(target, ref))
scores.append(mse(target, ref))
scores.append(ssim(target, ref, multichannel = True))
return scores
#prepare degraded images by introducing quality distortions via resizing
def prepare_images(path, factor):
    #loop through files in the directory
for file in os.listdir(path):
#open the file
img = cv2.imread(path +'/' + file)
#find old and new image dimensions
h, w, c = img.shape
new_height = h / factor
new_width = w / factor
        #resize the image down, then back up, to introduce distortion
img = (cv2.resize(img, (int(new_width), int(new_height)), interpolation = cv2.INTER_LINEAR))
img = (cv2.resize(img, (int(w), int(h)), interpolation = cv2.INTER_LINEAR))
#save the image
print('Saving {}'.format(file))
cv2.imwrite('images//{}'.format(file), img)
prepare_images('source_images/', 2)
#testing the generated images using image quality metrics
for file in os.listdir('images/'):
#open target and reference images
target = cv2.imread('images/{}'.format(file))
ref = cv2.imread('source_images/{}'.format(file))
#calculate the scores
scores = compare_images(target, ref)
#print all three scores
print('{}\nPSNR: {}\nMSE: {}\nSSIM: {}\n'.format(file, scores[0], scores[1], scores[2]))
#define the SRCNN model
def model():
#define the model type
SRCNN = Sequential()
#add model layers
SRCNN.add(Conv2D(filters = 128, kernel_size = (9,9), activation ='relu', padding = 'valid', use_bias = True, input_shape = (None, None, 1)))
SRCNN.add(Conv2D(filters = 64, kernel_size = (3,3), activation ='relu', padding = 'same', use_bias = True ))
SRCNN.add(Conv2D(filters = 1, kernel_size = (5,5), activation ='linear', padding = 'valid', use_bias = True))
#define optimizer
adam = Adam(learning_rate = 0.0003)
#compile model
SRCNN.compile(loss ='mean_squared_error', optimizer = adam, metrics =['mean_squared_error'])
return SRCNN
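
# Border bookkeeping (follows from the layers above): the 9x9 'valid' conv
# trims 4 pixels per side and the 5x5 'valid' conv trims 2 more, so the
# network output is 6 pixels smaller on each side -- which is why predict()
# below shaves a 6-pixel border off the reference and degraded images.
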
#define necessary image processing functions
def modcrop(img, scale):
tmpsz = img.shape
sz= tmpsz[0:2]
sz = sz - np.mod(sz, scale)
    #crop so both dimensions are divisible by the scale factor
    img = img[0:sz[0], 0:sz[1]]
return img
def shave(image, border):
img = image[border: -border, border: -border]
return img
#define main prediction function
def predict(image_path):
#load the srcnn model with weights
srcnn =model()
srcnn.load_weights('3051crop_weight_200.h5')
#load the degraded and reference images
path, file =os.path.split(image_path)
degraded = cv2.imread(image_path)
ref = cv2.imread('source_images/{}'.format(file))
#preprocess the image with modcrop
ref = modcrop(ref, 3)
degraded = modcrop(degraded, 3)
#convert the image to YCrCb -srcnn trained on Y channel
temp =cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)
#create image slice and normalize
Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype = float)
Y[0, :, :, 0] = temp[:, :, 0].astype(float)/ 255
#perform super resolution with srcnn
pre = srcnn.predict(Y, batch_size = 1)
#post process the output
    pre *= 255
pre[pre[:] > 255] = 255
pre[pre[:] < 0] = 0
pre = pre.astype(np.uint8)
#copy Y channel back to image and convert to BGR
temp = shave(temp, 6)
temp[:, :, 0] = pre[0, :, :, 0]
output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)
#remove border from reference and degraded image
ref = shave(ref.astype(np.uint8), 6)
degraded = shave(degraded.astype(np.uint8), 6)
#image quality calculations
scores = []
scores.append(compare_images(degraded, ref))
scores.append(compare_images(output, ref))
#return images and scores
return ref, degraded, output, scores
ref, degraded, output, scores = predict('images/flowers.bmp')
#print all scores for the degraded and reconstructed images
print('Degraded Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n'.format(scores[0][0], scores[0][1], scores[0][2]))
print('Reconstructed Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n'.format(scores[1][0], scores[1][1], scores[1][2]))
#display images as subplots
fig, axs = plt.subplots(1, 3, figsize = (20, 8))
axs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))
axs[0].set_title('Original')
axs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))
axs[1].set_title('Degraded')
axs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))
axs[2].set_title('SRCNN')
#remove the x and y tick marks
for ax in axs:
ax.set_xticks([])
ax.set_yticks([])
|
normal
|
{
"blob_id": "e086bebaa166abeea066fe49076f1b007858951f",
"index": 7052,
"step-1": "<mask token>\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\n<mask token>\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef psnr(target, ref):\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n diff = ref_data - target_data\n diff = diff.flatten('C')\n rmse = math.sqrt(np.mean(diff ** 2))\n return 20 * math.log10(255.0 / rmse)\n\n\ndef mse(target, ref):\n err = np.sum(target.astype('float') ** 2)\n err /= float(target.shape[0] * target.shape[1])\n return err\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\n<mask token>\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border:-border, border:-border]\n return img\n\n\ndef predict(image_path):\n srcnn = model()\n srcnn.load_weights('3051crop_weight_200.h5')\n path, file = os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n temp = cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype=float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float) / 255\n pre = srcnn.predict(Y, batch_size=1)\n pre *= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n ref = shave(ref.astype(np.uint8), 6)\n degraded = shave(degraded.astype(np.uint8), 6)\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n return ref, degraded, output, scores\n\n\n<mask token>\n",
"step-3": "<mask token>\nprint('Python: {}'.format(sys.version))\nprint('Numpy: {}'.format(numpy.__version__))\nprint('Keras: {}'.format(keras.__version__))\nprint('Matplotlib: {}'.format(matplotlib.__version__))\nprint('OpenCV: {}'.format(cv2.__version__))\nprint('Skimage: {}'.format(skimage.__version__))\n<mask token>\n\n\ndef psnr(target, ref):\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n diff = ref_data - target_data\n diff = diff.flatten('C')\n rmse = math.sqrt(np.mean(diff ** 2))\n return 20 * math.log10(255.0 / rmse)\n\n\ndef mse(target, ref):\n err = np.sum(target.astype('float') ** 2)\n err /= float(target.shape[0] * target.shape[1])\n return err\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\nprepare_images('source_images/', 2)\nfor file in os.listdir('images/'):\n target = cv2.imread('images/{}'.format(file))\n ref = cv2.imread('source_images/{}'.format(file))\n scores = compare_images(target, ref)\n print('{}\\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(file, scores[0],\n scores[1], scores[2]))\n\n\ndef model():\n SRCNN = Sequential()\n SRCNN.add(Conv2D(filters=128, kernel_size=(9, 9), activation='relu',\n padding='valid', use_bias=True, input_shape=(None, None, 1)))\n SRCNN.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu',\n padding='same', use_bias=True))\n SRCNN.add(Conv2D(filters=1, kernel_size=(5, 5), activation='linear',\n padding='valid', use_bias=True))\n adam = Adam(learning_rate=0.0003)\n SRCNN.compile(loss='mean_squared_error', optimizer=adam, metrics=[\n 'mean_squared_error'])\n return SRCNN\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border:-border, border:-border]\n return img\n\n\ndef predict(image_path):\n srcnn = model()\n srcnn.load_weights('3051crop_weight_200.h5')\n path, file = os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n temp = cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype=float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float) / 255\n pre = srcnn.predict(Y, batch_size=1)\n pre *= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n ref = shave(ref.astype(np.uint8), 6)\n degraded = shave(degraded.astype(np.uint8), 6)\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n return ref, degraded, output, scores\n\n\n<mask token>\nprint(\"\"\"Degraded Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n\"\"\".format(scores[0][0],\n scores[0][1], scores[0][2]))\nprint(\"\"\"Reconstructed Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n\"\"\".format(scores[\n 1][0], scores[1][1], 
scores[1][2]))\n<mask token>\naxs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))\naxs[0].set_title('Original')\naxs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))\naxs[1].set_title('Degraded')\naxs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))\naxs[2].set_title('SRCNN')\nfor ax in axs:\n ax.set_xticks([])\n ax.set_yticks([])\n",
"step-4": "<mask token>\nprint('Python: {}'.format(sys.version))\nprint('Numpy: {}'.format(numpy.__version__))\nprint('Keras: {}'.format(keras.__version__))\nprint('Matplotlib: {}'.format(matplotlib.__version__))\nprint('OpenCV: {}'.format(cv2.__version__))\nprint('Skimage: {}'.format(skimage.__version__))\n<mask token>\n\n\ndef psnr(target, ref):\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n diff = ref_data - target_data\n diff = diff.flatten('C')\n rmse = math.sqrt(np.mean(diff ** 2))\n return 20 * math.log10(255.0 / rmse)\n\n\ndef mse(target, ref):\n err = np.sum(target.astype('float') ** 2)\n err /= float(target.shape[0] * target.shape[1])\n return err\n\n\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel=True))\n return scores\n\n\ndef prepare_images(path, factor):\n for file in os.listdir(path):\n img = cv2.imread(path + '/' + file)\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n img = cv2.resize(img, (int(new_width), int(new_height)),\n interpolation=cv2.INTER_LINEAR)\n img = cv2.resize(img, (int(w), int(h)), interpolation=cv2.INTER_LINEAR)\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n\n\nprepare_images('source_images/', 2)\nfor file in os.listdir('images/'):\n target = cv2.imread('images/{}'.format(file))\n ref = cv2.imread('source_images/{}'.format(file))\n scores = compare_images(target, ref)\n print('{}\\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(file, scores[0],\n scores[1], scores[2]))\n\n\ndef model():\n SRCNN = Sequential()\n SRCNN.add(Conv2D(filters=128, kernel_size=(9, 9), activation='relu',\n padding='valid', use_bias=True, input_shape=(None, None, 1)))\n SRCNN.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu',\n padding='same', use_bias=True))\n SRCNN.add(Conv2D(filters=1, kernel_size=(5, 5), activation='linear',\n padding='valid', use_bias=True))\n adam = Adam(learning_rate=0.0003)\n SRCNN.compile(loss='mean_squared_error', optimizer=adam, metrics=[\n 'mean_squared_error'])\n return SRCNN\n\n\ndef modcrop(img, scale):\n tmpsz = img.shape\n sz = tmpsz[0:2]\n sz = sz - np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border:-border, border:-border]\n return img\n\n\ndef predict(image_path):\n srcnn = model()\n srcnn.load_weights('3051crop_weight_200.h5')\n path, file = os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n temp = cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype=float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float) / 255\n pre = srcnn.predict(Y, batch_size=1)\n pre *= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n ref = shave(ref.astype(np.uint8), 6)\n degraded = shave(degraded.astype(np.uint8), 6)\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n return ref, degraded, output, scores\n\n\nref, degraded, output, scores = predict('images/flowers.bmp')\nprint(\"\"\"Degraded Image: \nPSNR: {}\nMSE: {}\nSSIM: {}\n\"\"\".format(scores[0][0],\n scores[0][1], scores[0][2]))\nprint(\"\"\"Reconstructed Image: \nPSNR: {}\nMSE: {}\nSSIM: 
{}\n\"\"\".format(scores[\n 1][0], scores[1][1], scores[1][2]))\nfig, axs = plt.subplots(1, 3, figsize=(20, 8))\naxs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))\naxs[0].set_title('Original')\naxs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))\naxs[1].set_title('Degraded')\naxs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))\naxs[2].set_title('SRCNN')\nfor ax in axs:\n ax.set_xticks([])\n ax.set_yticks([])\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun May 17 17:24:39 2020\n\n@author: code\n\"\"\"\n\nimport sys\nimport keras\nimport cv2\nimport numpy\nimport matplotlib\nimport skimage\n\nprint('Python: {}'.format(sys.version))\nprint('Numpy: {}'.format(numpy.__version__))\nprint('Keras: {}'.format(keras.__version__))\nprint('Matplotlib: {}'.format(matplotlib.__version__))\nprint('OpenCV: {}'.format(cv2.__version__))\nprint('Skimage: {}'.format(skimage.__version__))\n\n\n#import necessary packages\nfrom keras.models import Sequential\nfrom keras.layers import Conv2D, Input\nfrom keras.optimizers import SGD, Adam\nfrom skimage.measure import compare_ssim as ssim\nfrom matplotlib import pyplot as plt\nimport cv2\nimport numpy as np\nimport math\nimport os\n\n#define A function for peak signal to noise ration(PSNR)\ndef psnr(target, ref):\n #assume RGB/BGR image\n target_data = target.astype(float)\n ref_data = ref.astype(float)\n \n diff = ref_data - target_data\n diff = diff.flatten('C')\n \n rmse = math.sqrt(np.mean(diff ** 2))\n \n return 20*math.log10(255. / rmse)\n\n\n#define function for mean Squared error(MSE)\ndef mse(target, ref):\n #mse is the sum pf the squared difference between the two image\n \n err = np.sum((target.astype('float'))** 2)\n err /= float(target.shape[0] *target.shape[1])\n \n return err\n \n#define function that combines all three image quality metrics\ndef compare_images(target, ref):\n scores = []\n scores.append(psnr(target, ref))\n scores.append(mse(target, ref))\n scores.append(ssim(target, ref, multichannel = True))\n \n return scores\n\n#prepare degraded images by introducing quality distortions via resizing\n \ndef prepare_images(path, factor):\n \n #loop throgh filesin the directory\n for file in os.listdir(path):\n \n #open the file\n img = cv2.imread(path +'/' + file)\n \n #find old and new image dimensions\n h, w, c = img.shape\n new_height = h / factor\n new_width = w / factor\n \n #resize the image -down\n img = (cv2.resize(img, (int(new_width), int(new_height)), interpolation = cv2.INTER_LINEAR))\n img = (cv2.resize(img, (int(w), int(h)), interpolation = cv2.INTER_LINEAR))\n \n #save the image\n print('Saving {}'.format(file))\n cv2.imwrite('images//{}'.format(file), img)\n \nprepare_images('source_images/', 2)\n\n#testing the generated images using image quality matrics\n\nfor file in os.listdir('images/'):\n \n #open target and reference images\n target = cv2.imread('images/{}'.format(file))\n ref = cv2.imread('source_images/{}'.format(file))\n \n #calculate the scores\n scores = compare_images(target, ref)\n \n #print all three scores\n print('{}\\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(file, scores[0], scores[1], scores[2]))\n \n#define the SRCNN model\n \ndef model():\n #define the model type\n SRCNN = Sequential()\n \n #add model layers\n SRCNN.add(Conv2D(filters = 128, kernel_size = (9,9), activation ='relu', padding = 'valid', use_bias = True, input_shape = (None, None, 1)))\n SRCNN.add(Conv2D(filters = 64, kernel_size = (3,3), activation ='relu', padding = 'same', use_bias = True ))\n SRCNN.add(Conv2D(filters = 1, kernel_size = (5,5), activation ='linear', padding = 'valid', use_bias = True))\n\n #define optimizer\n adam = Adam(learning_rate = 0.0003)\n #compile model\n SRCNN.compile(loss ='mean_squared_error', optimizer = adam, metrics =['mean_squared_error'])\n \n return SRCNN\n\n\n#define necessary image processing functions\ndef modcrop(img, scale):\n \n tmpsz = img.shape\n sz= tmpsz[0:2]\n sz = sz - 
np.mod(sz, scale)\n img = img[0:sz[0], 1:sz[1]]\n return img\n\n\ndef shave(image, border):\n img = image[border: -border, border: -border]\n return img\n\n#define main prediction function\ndef predict(image_path):\n \n #load the srcnn model with weights\n srcnn =model()\n srcnn.load_weights('3051crop_weight_200.h5')\n \n #load the degraded and reference images\n path, file =os.path.split(image_path)\n degraded = cv2.imread(image_path)\n ref = cv2.imread('source_images/{}'.format(file))\n \n #preprocess the image with modcrop\n ref = modcrop(ref, 3)\n degraded = modcrop(degraded, 3)\n \n #convert the image to YCrCb -srcnn trained on Y channel\n temp =cv2.cvtColor(degraded, cv2.COLOR_BGR2YCrCb)\n \n #create image slice and normalize\n Y = np.zeros((1, temp.shape[0], temp.shape[1], 1), dtype = float)\n Y[0, :, :, 0] = temp[:, :, 0].astype(float)/ 255\n \n #perform super resolution with srcnn\n pre = srcnn.predict(Y, batch_size = 1)\n \n #post process the output\n pre*= 255\n pre[pre[:] > 255] = 255\n pre[pre[:] < 0] = 0\n pre = pre.astype(np.uint8)\n \n #copy Y channel back to image and convert to BGR\n temp = shave(temp, 6)\n temp[:, :, 0] = pre[0, :, :, 0]\n output = cv2.cvtColor(temp, cv2.COLOR_YCrCb2BGR)\n \n #remove border from reference and degraded image\n ref = shave(ref.astype(np.uint8), 6)\n degraded = shave(degraded.astype(np.uint8), 6)\n \n #image quality calculations\n scores = []\n scores.append(compare_images(degraded, ref))\n scores.append(compare_images(output, ref))\n \n #return images and scores\n return ref, degraded, output, scores\n \n \n \nref, degraded, output, scores = predict('images/flowers.bmp')\n\n#print all score for all images\nprint('Degraded Image: \\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(scores[0][0], scores[0][1], scores[0][2]))\nprint('Reconstructed Image: \\nPSNR: {}\\nMSE: {}\\nSSIM: {}\\n'.format(scores[1][0], scores[1][1], scores[1][2]))\n\n#display images as subplots\nfig, axs = plt.subplots(1, 3, figsize = (20, 8))\naxs[0].imshow(cv2.cvtColor(ref, cv2.COLOR_BGR2RGB))\naxs[0].set_title('Original')\naxs[1].imshow(cv2.cvtColor(degraded, cv2.COLOR_BGR2RGB))\naxs[1].set_title('Degraded')\naxs[2].imshow(cv2.cvtColor(output, cv2.COLOR_BGR2RGB))\naxs[2].set_title('SRCNN')\n\n\n#remove the x and y tick marks\nfor ax in axs:\n ax.set_xticks([])\n ax.set_yticks([])",
"step-ids": [
3,
7,
9,
10,
12
]
}
|
[
3,
7,
9,
10,
12
] |
"""
Day 2
"""
with open('input.txt', 'r') as f:
lines = f.read()
lines = lines.split('\n')[:-1]
lines = [l.split(' ') for l in lines]
valid = 0
new_valid = 0
for cur_pw in lines:
letter = cur_pw[1].strip(':')
amount = cur_pw[2].count(letter)
rule = cur_pw[0].split('-')
rule = [int(r) for r in rule]
if amount >= rule[0] and amount <= rule[1]:
valid += 1
    occurrences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]
    if occurrences.count(letter) == 1:
new_valid += 1
print(valid)
print(new_valid)
|
normal
|
{
"blob_id": "46a3c3777d90976c7d39772d2e94430506d3acd7",
"index": 8025,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('input.txt', 'r') as f:\n lines = f.read()\n<mask token>\nfor cur_pw in lines:\n letter = cur_pw[1].strip(':')\n amount = cur_pw[2].count(letter)\n rule = cur_pw[0].split('-')\n rule = [int(r) for r in rule]\n if amount >= rule[0] and amount <= rule[1]:\n valid += 1\n occurences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]\n if occurences.count(letter) == 1:\n new_valid += 1\nprint(valid)\nprint(new_valid)\n",
"step-3": "<mask token>\nwith open('input.txt', 'r') as f:\n lines = f.read()\nlines = lines.split('\\n')[:-1]\nlines = [l.split(' ') for l in lines]\nvalid = 0\nnew_valid = 0\nfor cur_pw in lines:\n letter = cur_pw[1].strip(':')\n amount = cur_pw[2].count(letter)\n rule = cur_pw[0].split('-')\n rule = [int(r) for r in rule]\n if amount >= rule[0] and amount <= rule[1]:\n valid += 1\n occurences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]\n if occurences.count(letter) == 1:\n new_valid += 1\nprint(valid)\nprint(new_valid)\n",
"step-4": "\"\"\"\nDay 2\n\"\"\"\n\nwith open('input.txt', 'r') as f:\n lines = f.read()\n\nlines = lines.split('\\n')[:-1]\nlines = [l.split(' ') for l in lines]\n\nvalid = 0\nnew_valid = 0\nfor cur_pw in lines:\n\n letter = cur_pw[1].strip(':')\n amount = cur_pw[2].count(letter)\n rule = cur_pw[0].split('-')\n rule = [int(r) for r in rule]\n\n if amount >= rule[0] and amount <= rule[1]:\n valid += 1\n\n occurences = cur_pw[2][rule[0] - 1] + cur_pw[2][rule[1] - 1]\n if occurences.count(letter) == 1:\n new_valid += 1\n\nprint(valid)\nprint(new_valid)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torchvision import datasets, transforms, models
from torchvision.utils import make_grid
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
from PIL import Image
from IPython.display import display
import warnings
warnings.filterwarnings('ignore')
train_transform = transforms.Compose([
# transforms.RandomRotation(10),
# transforms.RandomHorizontalFlip(),
# transforms.Resize(224),
# transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
dataset = datasets.ImageFolder('shapes_dataset_LR', transform=train_transform)
torch.manual_seed(42)
train_data, test_data = torch.utils.data.random_split(dataset, [9000, 1000])
class_names = dataset.classes
train_loader = DataLoader(train_data, batch_size = 10, shuffle = True)
test_loader = DataLoader(test_data, batch_size = 10)
for images, labels in train_loader:
break
im = make_grid(images, nrow=5)
inv_normalize = transforms.Normalize(
    mean=[-0.485/0.229, -0.456/0.224, -0.406/0.225],
std=[1/0.229, 1/0.224, 1/0.225]
)
im_inv = inv_normalize(im)
print(labels)
plt.figure(figsize=(12,4))
plt.imshow(np.transpose(im_inv.numpy(), (1,2,0)))
plt.show()
class ConvolutionalNetwork(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(3, 6, 3, 1)
self.conv2 = nn.Conv2d(6, 16, 3, 1)
self.fc1 = nn.Linear(54 * 54 * 16, 120)
self.fc2 = nn.Linear(120, 84)
self.fc3 = nn.Linear(84, 2)
def forward(self, X):
X = F.relu(self.conv1(X))
X = F.max_pool2d(X, 2, 2)
X = F.relu(self.conv2(X))
X = F.max_pool2d(X, 2, 2)
X = X.view(-1, 54 * 54 * 16)
X = F.relu(self.fc1(X))
X = F.relu(self.fc2(X))
X = self.fc3(X)
return F.log_softmax(X, dim=1)
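
# Shape check for fc1 (assumes 224x224 RGB inputs, which the 54*54*16 figure
# implies): 224 -> conv1 (3x3, no padding) -> 222 -> pool/2 -> 111
# -> conv2 (3x3, no padding) -> 109 -> pool/2 -> 54, so the flattened
# feature map has 54 * 54 * 16 values.
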
torch.manual_seed(101)
CNNmodel = ConvolutionalNetwork()
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(CNNmodel.parameters(), lr=0.001)
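
# A minimal training-loop sketch (assumed; the original script defines the
# model, loss, and optimizer but never actually trains):
# for epoch in range(epochs):
#     for X_batch, y_batch in train_loader:
#         optimizer.zero_grad()
#         loss = criterion(CNNmodel(X_batch), y_batch)
#         loss.backward()
#         optimizer.step()
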
# # to count each class in validation set
# arr = np.array(np.array(dataset.imgs)[test_data.indices, 1], dtype=int)
# cnt = np.zeros((6,1), dtype = int)
# for i in range(1000):
# for j in range(6):
# if arr[i] == j:
# cnt[j] += 1
# break
# print(cnt)
# for reproducible results
seed = 42
np.random.seed(seed)
torch.manual_seed(seed)
#The compose function allows for multiple transforms
#transforms.ToTensor() converts our PILImage to a tensor of shape (C x H x W) in the range [0,1]
#transforms.Normalize(mean,std) normalizes a tensor to a (mean, std) for (R, G, B)
# transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
#
# train_set = torchvision.datasets.CIFAR10(root='./cifardata', train=True, download=True, transform=transform)
#
# test_set = torchvision.datasets.CIFAR10(root='./cifardata', train=False, download=True, transform=transform)
#
# classes = ('0', '1', '2', '3', '4', '5')
# x = torch.rand(5, 3)
# print(x)
|
normal
|
{
"blob_id": "7821b07a49db9f3f46bedc30f2271160e281806f",
"index": 4814,
"step-1": "<mask token>\n\n\nclass ConvolutionalNetwork(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 6, 3, 1)\n self.conv2 = nn.Conv2d(6, 16, 3, 1)\n self.fc1 = nn.Linear(54 * 54 * 16, 120)\n self.fc2 = nn.Linear(120, 84)\n self.fc3 = nn.Linear(84, 2)\n\n def forward(self, X):\n X = F.relu(self.conv1(X))\n X = F.max_pool2d(X, 2, 2)\n X = F.relu(self.conv2(X))\n X = F.max_pool2d(X, 2, 2)\n X = X.view(-1, 54 * 54 * 16)\n X = F.relu(self.fc1(X))\n X = F.relu(self.fc2(X))\n X = self.fc3(X)\n return F.log_softmax(X, dim=1)\n\n\n<mask token>\n",
"step-2": "<mask token>\nwarnings.filterwarnings('ignore')\n<mask token>\ntorch.manual_seed(42)\n<mask token>\nfor images, labels in train_loader:\n break\n<mask token>\nprint(labels)\nplt.figure(figsize=(12, 4))\nplt.imshow(np.transpose(im_inv.numpy(), (1, 2, 0)))\nplt.show()\n\n\nclass ConvolutionalNetwork(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 6, 3, 1)\n self.conv2 = nn.Conv2d(6, 16, 3, 1)\n self.fc1 = nn.Linear(54 * 54 * 16, 120)\n self.fc2 = nn.Linear(120, 84)\n self.fc3 = nn.Linear(84, 2)\n\n def forward(self, X):\n X = F.relu(self.conv1(X))\n X = F.max_pool2d(X, 2, 2)\n X = F.relu(self.conv2(X))\n X = F.max_pool2d(X, 2, 2)\n X = X.view(-1, 54 * 54 * 16)\n X = F.relu(self.fc1(X))\n X = F.relu(self.fc2(X))\n X = self.fc3(X)\n return F.log_softmax(X, dim=1)\n\n\ntorch.manual_seed(101)\n<mask token>\nnp.random.seed(seed)\ntorch.manual_seed(seed)\n",
"step-3": "<mask token>\nwarnings.filterwarnings('ignore')\ntrain_transform = transforms.Compose([transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])])\ndataset = datasets.ImageFolder('shapes_dataset_LR', transform=train_transform)\ntorch.manual_seed(42)\ntrain_data, test_data = torch.utils.data.random_split(dataset, [9000, 1000])\nclass_names = dataset.classes\ntrain_loader = DataLoader(train_data, batch_size=10, shuffle=True)\ntest_loader = DataLoader(test_data, batch_size=10)\nfor images, labels in train_loader:\n break\nim = make_grid(images, nrow=5)\ninv_normalize = transforms.Normalize(mean=[-0.485 / 0.229, -0.486 / 0.224, \n -0.406 / 0.225], std=[1 / 0.229, 1 / 0.224, 1 / 0.225])\nim_inv = inv_normalize(im)\nprint(labels)\nplt.figure(figsize=(12, 4))\nplt.imshow(np.transpose(im_inv.numpy(), (1, 2, 0)))\nplt.show()\n\n\nclass ConvolutionalNetwork(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 6, 3, 1)\n self.conv2 = nn.Conv2d(6, 16, 3, 1)\n self.fc1 = nn.Linear(54 * 54 * 16, 120)\n self.fc2 = nn.Linear(120, 84)\n self.fc3 = nn.Linear(84, 2)\n\n def forward(self, X):\n X = F.relu(self.conv1(X))\n X = F.max_pool2d(X, 2, 2)\n X = F.relu(self.conv2(X))\n X = F.max_pool2d(X, 2, 2)\n X = X.view(-1, 54 * 54 * 16)\n X = F.relu(self.fc1(X))\n X = F.relu(self.fc2(X))\n X = self.fc3(X)\n return F.log_softmax(X, dim=1)\n\n\ntorch.manual_seed(101)\nCNNmodel = ConvolutionalNetwork()\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(CNNmodel.parameters(), lr=0.001)\nseed = 42\nnp.random.seed(seed)\ntorch.manual_seed(seed)\n",
"step-4": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.utils.data import DataLoader\nfrom torchvision import datasets, transforms, models\nfrom torchvision.utils import make_grid\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport os\nfrom PIL import Image\nfrom IPython.display import display\nimport warnings\nwarnings.filterwarnings('ignore')\ntrain_transform = transforms.Compose([transforms.ToTensor(), transforms.\n Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])])\ndataset = datasets.ImageFolder('shapes_dataset_LR', transform=train_transform)\ntorch.manual_seed(42)\ntrain_data, test_data = torch.utils.data.random_split(dataset, [9000, 1000])\nclass_names = dataset.classes\ntrain_loader = DataLoader(train_data, batch_size=10, shuffle=True)\ntest_loader = DataLoader(test_data, batch_size=10)\nfor images, labels in train_loader:\n break\nim = make_grid(images, nrow=5)\ninv_normalize = transforms.Normalize(mean=[-0.485 / 0.229, -0.486 / 0.224, \n -0.406 / 0.225], std=[1 / 0.229, 1 / 0.224, 1 / 0.225])\nim_inv = inv_normalize(im)\nprint(labels)\nplt.figure(figsize=(12, 4))\nplt.imshow(np.transpose(im_inv.numpy(), (1, 2, 0)))\nplt.show()\n\n\nclass ConvolutionalNetwork(nn.Module):\n\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(3, 6, 3, 1)\n self.conv2 = nn.Conv2d(6, 16, 3, 1)\n self.fc1 = nn.Linear(54 * 54 * 16, 120)\n self.fc2 = nn.Linear(120, 84)\n self.fc3 = nn.Linear(84, 2)\n\n def forward(self, X):\n X = F.relu(self.conv1(X))\n X = F.max_pool2d(X, 2, 2)\n X = F.relu(self.conv2(X))\n X = F.max_pool2d(X, 2, 2)\n X = X.view(-1, 54 * 54 * 16)\n X = F.relu(self.fc1(X))\n X = F.relu(self.fc2(X))\n X = self.fc3(X)\n return F.log_softmax(X, dim=1)\n\n\ntorch.manual_seed(101)\nCNNmodel = ConvolutionalNetwork()\ncriterion = nn.CrossEntropyLoss()\noptimizer = torch.optim.Adam(CNNmodel.parameters(), lr=0.001)\nseed = 42\nnp.random.seed(seed)\ntorch.manual_seed(seed)\n",
"step-5": "import torch\r\nimport torch.nn as nn\r\nimport torch.nn.functional as F\r\nfrom torch.utils.data import DataLoader\r\nfrom torchvision import datasets, transforms, models\r\nfrom torchvision.utils import make_grid\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport os\r\nfrom PIL import Image\r\nfrom IPython.display import display\r\n\r\nimport warnings\r\nwarnings.filterwarnings('ignore')\r\n\r\ntrain_transform = transforms.Compose([\r\n # transforms.RandomRotation(10),\r\n # transforms.RandomHorizontalFlip(),\r\n # transforms.Resize(224),\r\n # transforms.CenterCrop(224),\r\n transforms.ToTensor(),\r\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\r\n])\r\n\r\ndataset = datasets.ImageFolder('shapes_dataset_LR',transform=train_transform)\r\ntorch.manual_seed(42)\r\ntrain_data, test_data = torch.utils.data.random_split(dataset, [9000, 1000])\r\n\r\nclass_names = dataset.classes\r\n\r\ntrain_loader = DataLoader(train_data, batch_size = 10, shuffle = True)\r\ntest_loader = DataLoader(test_data, batch_size = 10)\r\n\r\nfor images, labels in train_loader:\r\n break\r\n\r\nim = make_grid(images, nrow=5)\r\n\r\ninv_normalize = transforms.Normalize(\r\n mean=[-0.485/0.229, -0.486/0.224, -0.406/0.225],\r\n std=[1/0.229, 1/0.224, 1/0.225]\r\n)\r\n\r\nim_inv = inv_normalize(im)\r\nprint(labels)\r\nplt.figure(figsize=(12,4))\r\nplt.imshow(np.transpose(im_inv.numpy(), (1,2,0)))\r\nplt.show()\r\n\r\n\r\nclass ConvolutionalNetwork(nn.Module):\r\n\r\n def __init__(self):\r\n super().__init__()\r\n self.conv1 = nn.Conv2d(3, 6, 3, 1)\r\n self.conv2 = nn.Conv2d(6, 16, 3, 1)\r\n self.fc1 = nn.Linear(54 * 54 * 16, 120)\r\n self.fc2 = nn.Linear(120, 84)\r\n self.fc3 = nn.Linear(84, 2)\r\n\r\n def forward(self, X):\r\n X = F.relu(self.conv1(X))\r\n X = F.max_pool2d(X, 2, 2)\r\n X = F.relu(self.conv2(X))\r\n X = F.max_pool2d(X, 2, 2)\r\n X = X.view(-1, 54 * 54 * 16)\r\n X = F.relu(self.fc1(X))\r\n X = F.relu(self.fc2(X))\r\n X = self.fc3(X)\r\n\r\n return F.log_softmax(X, dim=1)\r\n\r\ntorch.manual_seed(101)\r\nCNNmodel = ConvolutionalNetwork()\r\ncriterion = nn.CrossEntropyLoss()\r\noptimizer = torch.optim.Adam(CNNmodel.parameters(), lr=0.001)\r\n\r\n\r\n\r\n# # to count each class in validation set\r\n# arr = np.array(np.array(dataset.imgs)[test_data.indices, 1], dtype=int)\r\n# cnt = np.zeros((6,1), dtype = int)\r\n# for i in range(1000):\r\n# for j in range(6):\r\n# if arr[i] == j:\r\n# cnt[j] += 1\r\n# break\r\n# print(cnt)\r\n\r\n\r\n# for reproducable results\r\nseed = 42\r\nnp.random.seed(seed)\r\ntorch.manual_seed(seed)\r\n\r\n\r\n#The compose function allows for multiple transforms\r\n#transforms.ToTensor() converts our PILImage to a tensor of shape (C x H x W) in the range [0,1]\r\n#transforms.Normalize(mean,std) normalizes a tensor to a (mean, std) for (R, G, B)\r\n# transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])\r\n#\r\n# train_set = torchvision.datasets.CIFAR10(root='./cifardata', train=True, download=True, transform=transform)\r\n#\r\n# test_set = torchvision.datasets.CIFAR10(root='./cifardata', train=False, download=True, transform=transform)\r\n#\r\n# classes = ('0', '1', '2', '3', '4', '5')\r\n\r\n# x = torch.rand(5, 3)\r\n# print(x)",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import os
import sys
import re
import traceback
import logging
import queue
import threading
from logging.handlers import TimedRotatingFileHandler
from pathlib import Path
import click
import inotify.adapters
from inotify.constants import (IN_ATTRIB, IN_DELETE, IN_MOVED_FROM,
IN_MOVED_TO, IN_CLOSE_WRITE)
from lxd_image_server.simplestreams.images import Images
from lxd_image_server.tools.cert import generate_cert
from lxd_image_server.tools.operation import Operations
from lxd_image_server.tools.mirror import MirrorManager
from lxd_image_server.tools.config import Config
logger = logging.getLogger('lxd-image-server')
event_queue = queue.Queue()
def threaded(fn):
def wrapper(*args, **kwargs):
threading.Thread(target=fn, args=args, kwargs=kwargs).start()
return wrapper
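# Note: threads created by @threaded are fire-and-forget (never joined, not
# daemonized); both decorated functions below are expected to loop forever.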
def configure_log(log_file, verbose=False):
filename = log_file
if log_file == 'STDOUT':
handler = logging.StreamHandler(sys.stdout)
elif log_file == 'STDERR':
handler = logging.StreamHandler(sys.stderr)
else:
handler = TimedRotatingFileHandler(
filename,
when="d", interval=7, backupCount=4)
formatter = logging.Formatter('[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')
handler.setFormatter(formatter)
logger.setLevel('DEBUG' if verbose else 'INFO')
logger.addHandler(handler)
def needs_update(events):
modified_files = []
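    # Each event is a (header, type_names, watch_path, filename) tuple; any
    # version-stamped name (e.g. 20180101_12:00) or move/delete/close-write
    # event marks the tree as needing a metadata rebuild.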
for event in list(events):
        if re.match(r'\d{8}_\d{2}:\d{2}', event[3]) or \
any(k in event[1]
for k in ('IN_MOVED_FROM', 'IN_MOVED_TO',
'IN_DELETE', 'IN_CLOSE_WRITE')):
logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'
.format(event[2], event[3], event[1]))
modified_files.append(event)
return modified_files
def config_inotify_setup(skipWatchingNonExistent: bool) -> inotify.adapters.Inotify:
i = inotify.adapters.Inotify()
watchedDirs = {}
for p in Config.paths:
if os.path.exists(p):
if os.path.isfile(p):
logger.debug("Watching existing config file {}".format(p))
i.add_watch(p, mask= inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_DELETE)
else:
logger.debug("Watching existing config directory {}".format(p))
i.add_watch(p) # SEEME: all events?
elif not skipWatchingNonExistent:
(d, n) = os.path.split(p)
while not os.path.exists(d):
(d, n) = os.path.split(d)
if d not in watchedDirs:
i.add_watch(d, inotify.constants.IN_DELETE | inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)
logger.debug("Watching directory {} as base for {}".format(d, p))
watchedDirs[d] = True
return i
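# Note: for paths that do not exist yet, config_inotify_setup() watches the
# nearest existing ancestor, so creating the file later still triggers the
# reload below, which re-runs the setup and re-arms the watches.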
@threaded
def update_config(skipWatchingNonExistent = True):
i = config_inotify_setup(skipWatchingNonExistent)
while True:
reload = False
for event in i.event_gen(yield_nones=False):
(_, mask, dir, file) = event
fp = os.path.join(dir, file).rstrip(os.path.sep)
for p in Config.paths:
if p == fp or (dir == p):
reload = True
break
if reload:
break
if reload:
logger.debug("Will reload configuration")
Config.reload_data()
            i = config_inotify_setup(skipWatchingNonExistent)
MirrorManager.update_mirror_list()
else:
logger.debug("No need to reload configuration")
@threaded
def update_metadata(img_dir, streams_dir):
MirrorManager.img_dir = img_dir
MirrorManager.update_mirror_list()
while True:
events = event_queue.get()
ops = Operations(events, str(Path(img_dir).resolve()))
if ops:
logger.info('Updating server: %s', ','.join(
str(x) for x in ops.ops))
images = Images(str(Path(streams_dir).resolve()), logger=logger)
images.update(ops.ops)
images.save()
MirrorManager.update()
logger.info('Server updated')
def fix_permissions(path):
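    # 0o775 (rwxrwxr-x) on every file and directory; presumably so the web
    # server group serving the simplestreams tree can read and traverse it.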
Path(path).chmod(0o775)
for root, dirs, files in os.walk(path):
for elem in files:
Path(root, elem).chmod(0o775)
for elem in dirs:
Path(root, elem).chmod(0o775)
@click.group()
@click.option('--log-file', default='./lxd-image-server.log',
show_default=True)
@click.option('--verbose', help='Sets log level to debug',
is_flag=True, default=False)
def cli(log_file, verbose):
configure_log(log_file, verbose)
@cli.command()
@click.option('--img_dir', default='/var/www/simplestreams/images',
show_default=True,
type=click.Path(exists=True, file_okay=False,
resolve_path=True),
callback=lambda ctx, param, val: Path(val))
@click.option('--streams_dir', default='/var/www/simplestreams/streams/v1',
show_default=True,
type=click.Path(exists=True, file_okay=False,
resolve_path=True))
@click.pass_context
def update(ctx, img_dir, streams_dir):
logger.info('Updating server')
img_dir = Path(img_dir).expanduser().resolve()
streams_dir = Path(streams_dir).expanduser().resolve()
images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=logger)
    # Generate a fake event to update the whole tree
fake_events = [
(None, ['IN_ISDIR', 'IN_CREATE'],
str(img_dir.parent), str(img_dir.name))
]
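    # The synthetic IN_ISDIR/IN_CREATE event on the images directory makes
    # Operations treat the whole tree as newly created, reusing the
    # incremental-update code path for a full rebuild.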
operations = Operations(fake_events, str(img_dir))
images.update(operations.ops)
images.save()
logger.info('Server updated')
@cli.command()
@click.option('--root_dir', default='/var/www/simplestreams',
show_default=True)
@click.option('--ssl_dir', default='/etc/nginx/ssl', show_default=True,
callback=lambda ctx, param, val: Path(val))
@click.option('--ssl_skip', default=False, is_flag=True)
@click.option('--nginx_skip', default=False, is_flag=True)
@click.pass_context
def init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):
root_dir = Path(root_dir).expanduser().resolve()
if not Path(root_dir).exists():
        logger.error('Root directory does not exist')
else:
if nginx_skip:
ssl_skip = True
if not ssl_skip:
if not ssl_dir.exists():
os.makedirs(str(ssl_dir))
if not (ssl_dir / 'nginx.key').exists():
generate_cert(str(ssl_dir))
img_dir = str(Path(root_dir, 'images'))
streams_dir = str(Path(root_dir, 'streams/v1'))
if not Path(img_dir).exists():
os.makedirs(img_dir)
if not Path(streams_dir).exists():
os.makedirs(streams_dir)
if not nginx_skip:
conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')
if not conf_path.exists():
conf_path.symlink_to(
'/etc/nginx/sites-available/simplestreams.conf')
os.system('nginx -s reload')
if not Path(root_dir, 'streams', 'v1', 'images.json').exists():
ctx.invoke(update, img_dir=Path(root_dir, 'images'),
streams_dir=Path(root_dir, 'streams', 'v1'))
fix_permissions(img_dir)
fix_permissions(streams_dir)
@cli.command()
@click.option('--img_dir', default='/var/www/simplestreams/images',
show_default=True,
type=click.Path(exists=True, file_okay=False,
resolve_path=True))
@click.option('--streams_dir', default='/var/www/simplestreams/streams/v1',
type=click.Path(exists=True, file_okay=False,
resolve_path=True), show_default=True)
@click.option('--skip-watch-config-non-existent', default=False, type=bool, is_flag=True)
@click.pass_context
def watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):
path_img_dir = str(Path(img_dir).expanduser().resolve())
path_streams_dir = str(Path(streams_dir).expanduser().resolve())
logger.info("Starting watch process")
Config.load_data()
    # Launch threads
    # SEEME: if an event comes from watching config files, there is a race condition between the update_config
    # thread, which indirectly uses MirrorManager.img_dir, and the update_metadata thread, which sets MirrorManager.img_dir.
    # There is also a race condition on calling MirrorManager.update_mirror_list() in both threads.
update_config(skip_watch_config_non_existent)
update_metadata(path_img_dir, path_streams_dir)
logger.debug("Watching image directory {}".format(path_img_dir))
i = inotify.adapters.InotifyTree(path_img_dir,
mask=(IN_ATTRIB | IN_DELETE |
IN_MOVED_FROM | IN_MOVED_TO |
IN_CLOSE_WRITE))
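    # event_gen() batches filesystem events for up to 15 s per iteration, so a
    # burst of image uploads is coalesced into a single queue entry for
    # update_metadata to process.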
while True:
events = i.event_gen(yield_nones=False, timeout_s=15)
files_changed = needs_update(events)
if files_changed:
event_queue.put(files_changed)
def main():
try:
sys.exit(cli())
except Exception:
logger.error(traceback.format_exc())
sys.exit(1)
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "3a96ede91069df0c71905415e598dbbd9d3056fd",
"index": 9730,
"step-1": "<mask token>\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\n<mask token>\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<mask token>\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not 
ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n<mask token>\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = 
Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(str(x) for x in ops\n .ops))\n images = Images(str(Path(streams_dir).resolve()), logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, 
type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\n<mask token>\n",
"step-4": "<mask token>\nlogger = logging.getLogger('lxd-image-server')\nevent_queue = queue.Queue()\n\n\ndef threaded(fn):\n\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(filename, when='d', interval=7,\n backupCount=4)\n formatter = logging.Formatter(\n '[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\\\d{8}_\\\\d{2}:\\\\d{2}', event[3]) or any(k in event[1] for\n k in ('IN_MOVED_FROM', 'IN_MOVED_TO', 'IN_DELETE',\n 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'.\n format(event[2], event[3], event[1]))\n modified_files.append(event)\n return modified_files\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool\n ) ->inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug('Watching existing config file {}'.format(p))\n i.add_watch(p, mask=inotify.constants.IN_CLOSE_WRITE |\n inotify.constants.IN_DELETE)\n else:\n logger.debug('Watching existing config directory {}'.format(p))\n i.add_watch(p)\n elif not skipWatchingNonExistent:\n d, n = os.path.split(p)\n while not os.path.exists(d):\n d, n = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.\n constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug('Watching directory {} as base for {}'.format(\n d, p))\n watchedDirs[d] = True\n return i\n\n\n@threaded\ndef update_config(skipWatchingNonExistent=True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n _, mask, dir, file = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or dir == p:\n reload = True\n break\n if reload:\n break\n if reload:\n logger.debug('Will reload configuration')\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug('No need to reload configuration')\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(str(x) for x in ops\n .ops))\n images = Images(str(Path(streams_dir).resolve()), logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(509)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(509)\n for elem in dirs:\n Path(root, elem).chmod(509)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log', show_default=True\n )\[email protected]('--verbose', help='Sets log level to debug', is_flag=True,\n default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email 
protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True), callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=\n logger)\n fake_events = [(None, ['IN_ISDIR', 'IN_CREATE'], str(img_dir.parent),\n str(img_dir.name))]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams', show_default=True\n )\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True, type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False, resolve_path=True),\n show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool,\n is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info('Starting watch process')\n Config.load_data()\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug('Watching image directory {}'.format(path_img_dir))\n i = inotify.adapters.InotifyTree(path_img_dir, mask=IN_ATTRIB |\n IN_DELETE | IN_MOVED_FROM | IN_MOVED_TO | IN_CLOSE_WRITE)\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except 
Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import os\nimport sys\nimport re\nimport traceback\nimport logging\nimport queue\nimport threading\nfrom logging.handlers import TimedRotatingFileHandler\nfrom pathlib import Path\nimport click\nimport inotify.adapters\nfrom inotify.constants import (IN_ATTRIB, IN_DELETE, IN_MOVED_FROM,\n IN_MOVED_TO, IN_CLOSE_WRITE)\nfrom lxd_image_server.simplestreams.images import Images\nfrom lxd_image_server.tools.cert import generate_cert\nfrom lxd_image_server.tools.operation import Operations\nfrom lxd_image_server.tools.mirror import MirrorManager\nfrom lxd_image_server.tools.config import Config\n\n\nlogger = logging.getLogger('lxd-image-server')\nevent_queue = queue.Queue()\n\ndef threaded(fn):\n def wrapper(*args, **kwargs):\n threading.Thread(target=fn, args=args, kwargs=kwargs).start()\n return wrapper\n\n\ndef configure_log(log_file, verbose=False):\n filename = log_file\n\n if log_file == 'STDOUT':\n handler = logging.StreamHandler(sys.stdout)\n elif log_file == 'STDERR':\n handler = logging.StreamHandler(sys.stderr)\n else:\n handler = TimedRotatingFileHandler(\n filename,\n when=\"d\", interval=7, backupCount=4)\n formatter = logging.Formatter('[%(asctime)s] [LxdImgServer] [%(levelname)s] %(message)s')\n handler.setFormatter(formatter)\n\n logger.setLevel('DEBUG' if verbose else 'INFO')\n logger.addHandler(handler)\n\n\ndef needs_update(events):\n modified_files = []\n for event in list(events):\n if re.match('\\d{8}_\\d{2}:\\d{2}', event[3]) or \\\n any(k in event[1]\n for k in ('IN_MOVED_FROM', 'IN_MOVED_TO',\n 'IN_DELETE', 'IN_CLOSE_WRITE')):\n logger.debug('Event: PATH=[{}] FILENAME=[{}] EVENT_TYPES={}'\n .format(event[2], event[3], event[1]))\n modified_files.append(event)\n\n return modified_files\n\n\n\ndef config_inotify_setup(skipWatchingNonExistent: bool) -> inotify.adapters.Inotify:\n i = inotify.adapters.Inotify()\n watchedDirs = {}\n\n for p in Config.paths:\n if os.path.exists(p):\n if os.path.isfile(p):\n logger.debug(\"Watching existing config file {}\".format(p))\n i.add_watch(p, mask= inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_DELETE)\n else:\n logger.debug(\"Watching existing config directory {}\".format(p))\n i.add_watch(p) # SEEME: all events?\n elif not skipWatchingNonExistent:\n (d, n) = os.path.split(p)\n while not os.path.exists(d):\n (d, n) = os.path.split(d)\n if d not in watchedDirs:\n i.add_watch(d, inotify.constants.IN_DELETE | inotify.constants.IN_CLOSE_WRITE | inotify.constants.IN_CREATE)\n logger.debug(\"Watching directory {} as base for {}\".format(d, p))\n watchedDirs[d] = True\n\n return i\n\n@threaded\ndef update_config(skipWatchingNonExistent = True):\n i = config_inotify_setup(skipWatchingNonExistent)\n while True:\n reload = False\n for event in i.event_gen(yield_nones=False):\n (_, mask, dir, file) = event\n fp = os.path.join(dir, file).rstrip(os.path.sep)\n for p in Config.paths:\n if p == fp or (dir == p):\n reload = True\n break\n if reload:\n break\n\n if reload:\n logger.debug(\"Will reload configuration\")\n Config.reload_data()\n i = config_inotify_setup()\n MirrorManager.update_mirror_list()\n else:\n logger.debug(\"No need to reload configuration\")\n\n\n@threaded\ndef update_metadata(img_dir, streams_dir):\n MirrorManager.img_dir = img_dir\n MirrorManager.update_mirror_list()\n while True:\n events = event_queue.get()\n ops = Operations(events, str(Path(img_dir).resolve()))\n if ops:\n logger.info('Updating server: %s', ','.join(\n str(x) for x in ops.ops))\n images = Images(str(Path(streams_dir).resolve()), 
logger=logger)\n images.update(ops.ops)\n images.save()\n MirrorManager.update()\n logger.info('Server updated')\n\n\ndef fix_permissions(path):\n Path(path).chmod(0o775)\n for root, dirs, files in os.walk(path):\n for elem in files:\n Path(root, elem).chmod(0o775)\n for elem in dirs:\n Path(root, elem).chmod(0o775)\n\n\[email protected]()\[email protected]('--log-file', default='./lxd-image-server.log',\n show_default=True)\[email protected]('--verbose', help='Sets log level to debug',\n is_flag=True, default=False)\ndef cli(log_file, verbose):\n configure_log(log_file, verbose)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True,\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True),\n callback=lambda ctx, param, val: Path(val))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n show_default=True,\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]_context\ndef update(ctx, img_dir, streams_dir):\n logger.info('Updating server')\n\n img_dir = Path(img_dir).expanduser().resolve()\n streams_dir = Path(streams_dir).expanduser().resolve()\n\n images = Images(str(Path(streams_dir).resolve()), rebuild=True, logger=logger)\n\n # Generate a fake event to update all tree\n fake_events = [\n (None, ['IN_ISDIR', 'IN_CREATE'],\n str(img_dir.parent), str(img_dir.name))\n ]\n operations = Operations(fake_events, str(img_dir))\n images.update(operations.ops)\n images.save()\n\n logger.info('Server updated')\n\n\[email protected]()\[email protected]('--root_dir', default='/var/www/simplestreams',\n show_default=True)\[email protected]('--ssl_dir', default='/etc/nginx/ssl', show_default=True,\n callback=lambda ctx, param, val: Path(val))\[email protected]('--ssl_skip', default=False, is_flag=True)\[email protected]('--nginx_skip', default=False, is_flag=True)\[email protected]_context\ndef init(ctx, root_dir, ssl_dir, ssl_skip, nginx_skip):\n root_dir = Path(root_dir).expanduser().resolve()\n\n if not Path(root_dir).exists():\n logger.error('Root directory does not exists')\n else:\n if nginx_skip:\n ssl_skip = True\n\n if not ssl_skip:\n if not ssl_dir.exists():\n os.makedirs(str(ssl_dir))\n\n if not (ssl_dir / 'nginx.key').exists():\n generate_cert(str(ssl_dir))\n\n img_dir = str(Path(root_dir, 'images'))\n streams_dir = str(Path(root_dir, 'streams/v1'))\n if not Path(img_dir).exists():\n os.makedirs(img_dir)\n if not Path(streams_dir).exists():\n os.makedirs(streams_dir)\n\n if not nginx_skip:\n conf_path = Path('/etc/nginx/sites-enabled/simplestreams.conf')\n if not conf_path.exists():\n conf_path.symlink_to(\n '/etc/nginx/sites-available/simplestreams.conf')\n os.system('nginx -s reload')\n\n if not Path(root_dir, 'streams', 'v1', 'images.json').exists():\n ctx.invoke(update, img_dir=Path(root_dir, 'images'),\n streams_dir=Path(root_dir, 'streams', 'v1'))\n\n fix_permissions(img_dir)\n fix_permissions(streams_dir)\n\n\[email protected]()\[email protected]('--img_dir', default='/var/www/simplestreams/images',\n show_default=True,\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True))\[email protected]('--streams_dir', default='/var/www/simplestreams/streams/v1',\n type=click.Path(exists=True, file_okay=False,\n resolve_path=True), show_default=True)\[email protected]('--skip-watch-config-non-existent', default=False, type=bool, is_flag=True)\[email protected]_context\ndef watch(ctx, img_dir, streams_dir, skip_watch_config_non_existent: 
bool):\n path_img_dir = str(Path(img_dir).expanduser().resolve())\n path_streams_dir = str(Path(streams_dir).expanduser().resolve())\n logger.info(\"Starting watch process\")\n\n Config.load_data()\n # Lauch threads\n # SEEME: in case an event will come from watching config files, there is a race condition between update_config\n # thread using indirectly MirrorManager.img_dir and thread update_metadata setting MirrorManager.img_dir\n # Also, race condition on calling MirrorManager.update_mirror_list() in both threads.\n update_config(skip_watch_config_non_existent)\n update_metadata(path_img_dir, path_streams_dir)\n logger.debug(\"Watching image directory {}\".format(path_img_dir))\n\n i = inotify.adapters.InotifyTree(path_img_dir,\n mask=(IN_ATTRIB | IN_DELETE |\n IN_MOVED_FROM | IN_MOVED_TO |\n IN_CLOSE_WRITE))\n\n while True:\n events = i.event_gen(yield_nones=False, timeout_s=15)\n files_changed = needs_update(events)\n if files_changed:\n event_queue.put(files_changed)\n\n\ndef main():\n try:\n sys.exit(cli())\n except Exception:\n logger.error(traceback.format_exc())\n sys.exit(1)\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
7,
11,
12,
14,
16
]
}
|
[
7,
11,
12,
14,
16
] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# you can use print for debugging purposes, e.g.
# print "this is a debug message"
def solution(A):
    N = len(A)
#min_avg = min( (A[0] + A[1]) / 2, (A[0] + A[1] + A[2]) / 3)
min_avg = (A[0] + A[1]) / 2.0
min_idx = 0
now_avg = 0.0
for i in xrange(1,N-1):
now_avg = (A[i] + A[i+1]) / 2.0
if now_avg < min_avg:
min_avg = now_avg
min_idx = i
if N > 2:
for i in xrange(N-2):
now_avg = (A[i] + A[i+1] + A[i+2]) / 3.0
if now_avg < min_avg:
min_avg = now_avg
min_idx = i
return min_idx
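# For the statement's example, solution([4, 2, 2, 5, 1, 5, 8]) returns 1:
# slice (1, 2) has the minimal average, (2 + 2) / 2 = 2.0.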
"""
A non-empty zero-indexed array A consisting of N integers is given. A pair of integers (P, Q), such that 0 ≤ P < Q < N, is called a slice of array A (notice that the slice contains at least two elements). The average of a slice (P, Q) is the sum of A[P] + A[P + 1] + ... + A[Q] divided by the length of the slice. To be precise, the average equals (A[P] + A[P + 1] + ... + A[Q]) / (Q − P + 1).
For example, array A such that:
A[0] = 4
A[1] = 2
A[2] = 2
A[3] = 5
A[4] = 1
A[5] = 5
A[6] = 8
contains the following example slices:
slice (1, 2), whose average is (2 + 2) / 2 = 2;
slice (3, 4), whose average is (5 + 1) / 2 = 3;
slice (1, 4), whose average is (2 + 2 + 5 + 1) / 4 = 2.5.
The goal is to find the starting position of a slice whose average is minimal.
Write a function:
def solution(A)
that, given a non-empty zero-indexed array A consisting of N integers, returns the starting position of the slice with the minimal average. If there is more than one slice with a minimal average, you should return the smallest starting position of such a slice.
For example, given array A such that:
A[0] = 4
A[1] = 2
A[2] = 2
A[3] = 5
A[4] = 1
A[5] = 5
A[6] = 8
the function should return 1, as explained above.
Assume that:
N is an integer within the range [2..100,000];
each element of array A is an integer within the range [−10,000..10,000].
Complexity:
expected worst-case time complexity is O(N);
expected worst-case space complexity is O(N), beyond input storage (not counting the storage required for input arguments).
Elements of input arrays can be modified.
"""
"""
Analysis
Detected time complexity:
O(N)
collapse all
Example tests
▶
example
example test
✔
OK
1.
0.067 s
OK
collapse all
Correctness tests
▶
double_quadruple
two or four elements
✔
OK
1.
0.066 s
OK
2.
0.067 s
OK
3.
0.067 s
OK
4.
0.066 s
OK
▶
simple1
simple test, the best slice has length 3
✔
OK
1.
0.066 s
OK
2.
0.065 s
OK
▶
simple2
simple test, the best slice has length 3
✔
OK
1.
0.067 s
OK
▶
small_random
random, length = 100
✔
OK
1.
0.067 s
OK
▶
medium_range
increasing, decreasing (legth = ~100) and small functional
✔
OK
1.
0.066 s
OK
2.
0.067 s
OK
3.
0.067 s
OK
collapse all
Performance tests
▶
medium_random
random, N = ~700
✔
OK
1.
0.066 s
OK
▶
large_ones
numbers from -1 to 1, N = ~100,000
✔
OK
1.
0.168 s
OK
2.
0.143 s
OK
▶
large_random
random, N = ~100,000
✔
OK
1.
0.178 s
OK
▶
extreme_values
all maximal values, N = ~100,000
✔
OK
1.
0.184 s
OK
2.
0.181 s
OK
3.
0.175 s
OK
▶
large_sequence
many seqeneces, N = ~100,000
✔
OK
1.
0.168 s
OK
2.
0.142 s
OK
"""
|
normal
|
{
"blob_id": "caa92eb5582135f60a6034cb83d364501361d00e",
"index": 7726,
"step-1": "<mask token>\n",
"step-2": "def solution(A):\n N = len(A)\n min_avg = (A[0] + A[1]) / 2.0\n min_idx = 0\n now_avg = 0.0\n for i in xrange(1, N - 1):\n now_avg = (A[i] + A[i + 1]) / 2.0\n if now_avg < min_avg:\n min_avg = now_avg\n min_idx = i\n if N > 2:\n for i in xrange(N - 2):\n now_avg = (A[i] + A[i + 1] + A[i + 2]) / 3.0\n if now_avg < min_avg:\n min_avg = now_avg\n min_idx = i\n return min_idx\n\n\n<mask token>\n",
"step-3": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# you can use print for debugging purposes, e.g.\n# print \"this is a debug message\"\n\n\ndef solution(A):\n N = len (A)\n #min_avg = min( (A[0] + A[1]) / 2, (A[0] + A[1] + A[2]) / 3)\n min_avg = (A[0] + A[1]) / 2.0\n min_idx = 0\n now_avg = 0.0\n \n \n for i in xrange(1,N-1):\n now_avg = (A[i] + A[i+1]) / 2.0\n if now_avg < min_avg:\n min_avg = now_avg\n min_idx = i \n \n if N > 2: \n for i in xrange(N-2):\n now_avg = (A[i] + A[i+1] + A[i+2]) / 3.0\n if now_avg < min_avg:\n min_avg = now_avg\n min_idx = i\n \n return min_idx\n\n\n\"\"\"\n\n non-empty zero-indexed array A consisting of N integers is given. A pair of integers (P, Q), such that 0 ≤ P < Q < N, is called a slice of array A (notice that the slice contains at least two elements). The average of a slice (P, Q) is the sum of A[P] + A[P + 1] + ... + A[Q] divided by the length of the slice. To be precise, the average equals (A[P] + A[P + 1] + ... + A[Q]) / (Q − P + 1).\n\nFor example, array A such that:\n A[0] = 4\n A[1] = 2\n A[2] = 2\n A[3] = 5\n A[4] = 1\n A[5] = 5\n A[6] = 8\n\ncontains the following example slices:\n\n slice (1, 2), whose average is (2 + 2) / 2 = 2;\n slice (3, 4), whose average is (5 + 1) / 2 = 3;\n slice (1, 4), whose average is (2 + 2 + 5 + 1) / 4 = 2.5.\n\nThe goal is to find the starting position of a slice whose average is minimal.\n\nWrite a function:\n\n def solution(A)\n\nthat, given a non-empty zero-indexed array A consisting of N integers, returns the starting position of the slice with the minimal average. If there is more than one slice with a minimal average, you should return the smallest starting position of such a slice.\n\nFor example, given array A such that:\n A[0] = 4\n A[1] = 2\n A[2] = 2\n A[3] = 5\n A[4] = 1\n A[5] = 5\n A[6] = 8\n\nthe function should return 1, as explained above.\n\nAssume that:\n\n N is an integer within the range [2..100,000];\n each element of array A is an integer within the range [−10,000..10,000].\n\nComplexity:\n\n expected worst-case time complexity is O(N);\n expected worst-case space complexity is O(N), beyond input storage (not counting the storage required for input arguments).\n\nElements of input arrays can be modified.\n\"\"\"\n\n\"\"\"\nAnalysis\nDetected time complexity:\nO(N)\ncollapse all\nExample tests\n▶\nexample\nexample test\n✔\nOK\n1.\n0.067 s\nOK\ncollapse all\nCorrectness tests\n▶\ndouble_quadruple\ntwo or four elements\n✔\nOK\n1.\n0.066 s\nOK\n2.\n0.067 s\nOK\n3.\n0.067 s\nOK\n4.\n0.066 s\nOK\n▶\nsimple1\nsimple test, the best slice has length 3\n✔\nOK\n1.\n0.066 s\nOK\n2.\n0.065 s\nOK\n▶\nsimple2\nsimple test, the best slice has length 3\n✔\nOK\n1.\n0.067 s\nOK\n▶\nsmall_random\nrandom, length = 100\n✔\nOK\n1.\n0.067 s\nOK\n▶\nmedium_range\nincreasing, decreasing (legth = ~100) and small functional\n✔\nOK\n1.\n0.066 s\nOK\n2.\n0.067 s\nOK\n3.\n0.067 s\nOK\ncollapse all\nPerformance tests\n▶\nmedium_random\nrandom, N = ~700\n✔\nOK\n1.\n0.066 s\nOK\n▶\nlarge_ones\nnumbers from -1 to 1, N = ~100,000\n✔\nOK\n1.\n0.168 s\nOK\n2.\n0.143 s\nOK\n▶\nlarge_random\nrandom, N = ~100,000\n✔\nOK\n1.\n0.178 s\nOK\n▶\nextreme_values\nall maximal values, N = ~100,000\n✔\nOK\n1.\n0.184 s\nOK\n2.\n0.181 s\nOK\n3.\n0.175 s\nOK\n▶\nlarge_sequence\nmany seqeneces, N = ~100,000\n✔\nOK\n1.\n0.168 s\nOK\n2.\n0.142 s\nOK\n\n\"\"\"\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Copyright 2018 dhtech
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file
import lib
import urlparse
import yaml
MANIFEST_PATH = '/etc/manifest'
HTTP_BASIC_AUTH = None
def blackbox(name, backend, targets, params,
target='target', path='/probe', labels=None):
labels = {} if labels is None else labels
# Strip banned OSes
banned_oses = ['debian']
filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]
return {
'job_name': name,
'metrics_path': path,
'params': params,
'static_configs': [{
'targets': sorted(filtered_targets),
'labels': labels
}],
'relabel_configs': [{
'source_labels': ['__address__'],
'regex': '(.*)(:80)?',
'target_label': '__param_%s' % target,
'replacement': '${1}',
}, {
'source_labels': ['__param_%s' % target],
'regex': '(.*)',
'target_label': 'instance',
'replacement': '${1}',
}, {
'source_labels': [],
'regex': '.*',
'target_label': '__address__',
'replacement': backend,
}]
}
def generate_backend(host, local_services):
scrape_configs = []
scrape_configs.extend(local_services)
domain = lib.get_domain(host)
basic_auth = lib.read_secret('services/monitoring:login')
# Find services that wants to be monitored
manifest = yaml.load(file(MANIFEST_PATH).read())
for package, spec in manifest['packages'].iteritems():
if spec is None or 'monitor' not in spec:
continue
urls = (spec['monitor']['url']
if isinstance(spec['monitor']['url'], dict) else
{None: spec['monitor']['url']})
for url_id, url_str in urls.iteritems():
url = urlparse.urlparse(url_str)
targets = []
for target in sorted(
lib.get_nodes_with_package(package, domain).keys()):
targets.append(target if url.port is None else '%s:%d' % (
target, url.port))
scrape_config = {
'job_name': package + ('-%s' % url_id if url_id else ''),
'metrics_path': url.path,
'scheme': url.scheme,
'static_configs': [
{'targets': sorted(targets)}
],
}
if 'interval' in spec['monitor']:
scrape_config['scrape_interval'] = spec['monitor']['interval']
if 'labels' in spec['monitor']:
scrape_config['static_configs'][0]['labels'] = spec['monitor']['labels']
# Only allow authentication over https
if spec['monitor'].get('auth', False) and url.scheme == 'https':
scrape_config['basic_auth'] = basic_auth
scrape_configs.append(scrape_config)
# Layer specific monitoring
layers = lib.get_layers(domain)
snmp_nodes = {}
ssh_nodes = {}
for layer in layers:
hosts = lib.get_nodes_with_layer(layer, domain)
snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')
ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')
snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))
ssh_nodes[layer] = [x+':22' for x in set(hosts) - set(ssh_mute)]
# SNMP
for layer in layers:
# TODO(bluecmd): Use options for this
if layer == 'access':
snmp_host = 'snmp2.event.dreamhack.se'
else:
snmp_host = 'snmp1.event.dreamhack.se'
snmp = blackbox(
'snmp_%s' % layer, snmp_host,
snmp_nodes[layer], {'layer': [layer]}, labels={
'layer': layer})
snmp['scrape_interval'] = '30s'
snmp['scrape_timeout'] = '30s'
scrape_configs.append(snmp)
# SSH
for layer in layers:
for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:
fqdn = ssh_host + '.event.dreamhack.se:9115'
ssh = blackbox(
'ssh_%s_%s' % (layer, ssh_host), fqdn,
ssh_nodes[layer], {'module': ['ssh_banner']}, labels={'layer': layer})
ssh['scrape_interval'] = '30s'
ssh['scrape_timeout'] = '30s'
scrape_configs.append(ssh)
# Add external service-discovery
external = {
'job_name': 'external',
'file_sd_configs': [{
'files': ['/etc/prometheus/external/*.yaml'],
}],
}
scrape_configs.append(external)
if host.endswith('.event.dreamhack.se'):
# Event should scrape puppet.tech.dreamhack.se to get information about
# puppet runs
puppet = {
'job_name': 'puppet_runs',
'metrics_path': '/metrics',
'scrape_interval': '60s',
'scrape_timeout': '55s',
'static_configs': [{
'targets': ['puppet.tech.dreamhack.se:9100'],
}],
}
scrape_configs.append(puppet)
vcenter = {
'job_name': 'vmware_vcenter',
'metrics_path': '/metrics',
'scrape_interval': '60s',
'scrape_timeout': '55s',
'static_configs': [{
'targets': ['provision.event.dreamhack.se:9272'],
}],
}
scrape_configs.append(vcenter)
    # Make sure that all metrics have a host label.
    # This rule uses the existing host label if there is one,
    # stripping off the port (which shouldn't be part of the host label anyway),
    # *or*, if that label does not exist, it uses the instance label
    # (again stripping off the port).
relabel = {
'regex': r':?([^:]*):?.*',
'separator': ':',
'replacement': '${1}',
'source_labels': ['host', 'instance'],
'target_label': 'host',
}
mrc = 'metric_relabel_configs'
for scrape in scrape_configs:
if mrc in scrape:
scrape[mrc].append(relabel)
else:
scrape[mrc] = [relabel]
return {'scrape_configs': scrape_configs}
def requires(host, *args):
return ['apache(ldap)']
def generate(host, *args):
info = {}
local_targets = []
local_targets.append({
'job_name': 'prometheus',
'scheme': 'http',
'static_configs': [{'targets': ['localhost:9090']}]})
info['prometheus'] = generate_backend(host, local_targets)
# Get current event
info['prometheus']['current_event'] = lib.get_current_event()
return info
# vim: ts=4: sts=4: sw=4: expandtab
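
# --- illustrative only; not used by the generator above ---
# A minimal sketch of what the `relabel` rule does: Prometheus joins the
# `host` and `instance` label values with the ':' separator, and the regex
# then keeps just the hostname. Emulated here with plain `re`:
import re

_relabel = re.compile(r':?([^:]*):?.*')
# host label empty, instance label set -> joined value starts with the separator
assert _relabel.match(':puppet.tech.dreamhack.se:9100').group(1) == 'puppet.tech.dreamhack.se'
# host label already present -> the first field wins
assert _relabel.match('myhost:myhost.event.dreamhack.se:22').group(1) == 'myhost'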
|
normal
|
{
"blob_id": "f489058c922d405754ad32a737f67bc03c08772b",
"index": 701,
"step-1": "<mask token>\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}\n 
scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\n<mask token>\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-2": "<mask token>\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}\n 
scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-3": "<mask token>\nMANIFEST_PATH = '/etc/manifest'\nHTTP_BASIC_AUTH = None\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 'static_configs': [{'targets': 
['puppet.tech.dreamhack.se:9100']}]}\n scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-4": "import lib\nimport urlparse\nimport yaml\nMANIFEST_PATH = '/etc/manifest'\nHTTP_BASIC_AUTH = None\n\n\ndef blackbox(name, backend, targets, params, target='target', path='/probe',\n labels=None):\n labels = {} if labels is None else labels\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {'job_name': name, 'metrics_path': path, 'params': params,\n 'static_configs': [{'targets': sorted(filtered_targets), 'labels':\n labels}], 'relabel_configs': [{'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?', 'target_label': '__param_%s' % target,\n 'replacement': '${1}'}, {'source_labels': ['__param_%s' % target],\n 'regex': '(.*)', 'target_label': 'instance', 'replacement': '${1}'},\n {'source_labels': [], 'regex': '.*', 'target_label': '__address__',\n 'replacement': backend}]}\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n basic_auth = lib.read_secret('services/monitoring:login')\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n urls = spec['monitor']['url'] if isinstance(spec['monitor']['url'],\n dict) else {None: spec['monitor']['url']}\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(lib.get_nodes_with_package(package, domain\n ).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {'job_name': package + ('-%s' % url_id if\n url_id else ''), 'metrics_path': url.path, 'scheme': url.\n scheme, 'static_configs': [{'targets': sorted(targets)}]}\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor'][\n 'labels']\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n layers = lib.get_layers(domain)\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [(x + ':22') for x in set(hosts) - set(ssh_mute)]\n for layer in layers:\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox('snmp_%s' % layer, snmp_host, snmp_nodes[layer], {\n 'layer': [layer]}, labels={'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox('ssh_%s_%s' % (layer, ssh_host), fqdn, ssh_nodes\n [layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n external = {'job_name': 'external', 'file_sd_configs': [{'files': [\n '/etc/prometheus/external/*.yaml']}]}\n scrape_configs.append(external)\n if host.endswith('.event.dreamhack.se'):\n puppet = {'job_name': 'puppet_runs', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s',\n 
'static_configs': [{'targets': ['puppet.tech.dreamhack.se:9100']}]}\n scrape_configs.append(puppet)\n vcenter = {'job_name': 'vmware_vcenter', 'metrics_path': '/metrics',\n 'scrape_interval': '60s', 'scrape_timeout': '55s', 'static_configs':\n [{'targets': ['provision.event.dreamhack.se:9272']}]}\n scrape_configs.append(vcenter)\n relabel = {'regex': ':?([^:]*):?.*', 'separator': ':', 'replacement':\n '${1}', 'source_labels': ['host', 'instance'], 'target_label': 'host'}\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n info = {}\n local_targets = []\n local_targets.append({'job_name': 'prometheus', 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n info['prometheus']['current_event'] = lib.get_current_event()\n return info\n",
"step-5": "# Copyright 2018 dhtech\n#\n# Use of this source code is governed by a BSD-style\n# license that can be found in the LICENSE file\nimport lib\nimport urlparse\nimport yaml\n\n\nMANIFEST_PATH = '/etc/manifest'\nHTTP_BASIC_AUTH = None\n\n\ndef blackbox(name, backend, targets, params,\n target='target', path='/probe', labels=None):\n labels = {} if labels is None else labels\n # Strip banned OSes\n banned_oses = ['debian']\n filtered_targets = [x for x in targets if lib.get_os(x) not in banned_oses]\n return {\n 'job_name': name,\n 'metrics_path': path,\n 'params': params,\n 'static_configs': [{\n 'targets': sorted(filtered_targets),\n 'labels': labels\n }],\n 'relabel_configs': [{\n 'source_labels': ['__address__'],\n 'regex': '(.*)(:80)?',\n 'target_label': '__param_%s' % target,\n 'replacement': '${1}',\n }, {\n 'source_labels': ['__param_%s' % target],\n 'regex': '(.*)',\n 'target_label': 'instance',\n 'replacement': '${1}',\n }, {\n 'source_labels': [],\n 'regex': '.*',\n 'target_label': '__address__',\n 'replacement': backend,\n }]\n }\n\n\ndef generate_backend(host, local_services):\n scrape_configs = []\n scrape_configs.extend(local_services)\n domain = lib.get_domain(host)\n\n basic_auth = lib.read_secret('services/monitoring:login')\n\n # Find services that wants to be monitored\n manifest = yaml.load(file(MANIFEST_PATH).read())\n for package, spec in manifest['packages'].iteritems():\n if spec is None or 'monitor' not in spec:\n continue\n\n urls = (spec['monitor']['url']\n if isinstance(spec['monitor']['url'], dict) else\n {None: spec['monitor']['url']})\n for url_id, url_str in urls.iteritems():\n url = urlparse.urlparse(url_str)\n targets = []\n for target in sorted(\n lib.get_nodes_with_package(package, domain).keys()):\n targets.append(target if url.port is None else '%s:%d' % (\n target, url.port))\n scrape_config = {\n 'job_name': package + ('-%s' % url_id if url_id else ''),\n 'metrics_path': url.path,\n 'scheme': url.scheme,\n 'static_configs': [\n {'targets': sorted(targets)}\n ],\n }\n if 'interval' in spec['monitor']:\n scrape_config['scrape_interval'] = spec['monitor']['interval']\n if 'labels' in spec['monitor']:\n scrape_config['static_configs'][0]['labels'] = spec['monitor']['labels']\n # Only allow authentication over https\n if spec['monitor'].get('auth', False) and url.scheme == 'https':\n scrape_config['basic_auth'] = basic_auth\n scrape_configs.append(scrape_config)\n\n # Layer specific monitoring\n layers = lib.get_layers(domain)\n\n snmp_nodes = {}\n ssh_nodes = {}\n for layer in layers:\n hosts = lib.get_nodes_with_layer(layer, domain)\n snmp_mute = lib.get_nodes_with_layer(layer, domain, 'no-snmp')\n ssh_mute = lib.get_nodes_with_layer(layer, domain, 'no-ssh')\n snmp_nodes[layer] = list(set(hosts) - set(snmp_mute))\n ssh_nodes[layer] = [x+':22' for x in set(hosts) - set(ssh_mute)]\n\n # SNMP\n for layer in layers:\n # TODO(bluecmd): Use options for this\n if layer == 'access':\n snmp_host = 'snmp2.event.dreamhack.se'\n else:\n snmp_host = 'snmp1.event.dreamhack.se'\n snmp = blackbox(\n 'snmp_%s' % layer, snmp_host,\n snmp_nodes[layer], {'layer': [layer]}, labels={\n 'layer': layer})\n snmp['scrape_interval'] = '30s'\n snmp['scrape_timeout'] = '30s'\n scrape_configs.append(snmp)\n\n # SSH\n for layer in layers:\n for ssh_host in ['jumpgate1', 'jumpgate2', 'rancid']:\n fqdn = ssh_host + '.event.dreamhack.se:9115'\n ssh = blackbox(\n 'ssh_%s_%s' % (layer, ssh_host), fqdn,\n ssh_nodes[layer], {'module': ['ssh_banner']}, labels={'layer': layer})\n 
ssh['scrape_interval'] = '30s'\n ssh['scrape_timeout'] = '30s'\n scrape_configs.append(ssh)\n\n # Add external service-discovery\n external = {\n 'job_name': 'external',\n 'file_sd_configs': [{\n 'files': ['/etc/prometheus/external/*.yaml'],\n }],\n }\n scrape_configs.append(external)\n\n if host.endswith('.event.dreamhack.se'):\n # Event should scrape puppet.tech.dreamhack.se to get information about\n # puppet runs\n puppet = {\n 'job_name': 'puppet_runs',\n 'metrics_path': '/metrics',\n 'scrape_interval': '60s',\n 'scrape_timeout': '55s',\n 'static_configs': [{\n 'targets': ['puppet.tech.dreamhack.se:9100'],\n }],\n }\n scrape_configs.append(puppet)\n\n vcenter = {\n 'job_name': 'vmware_vcenter',\n 'metrics_path': '/metrics',\n 'scrape_interval': '60s',\n 'scrape_timeout': '55s',\n 'static_configs': [{\n 'targets': ['provision.event.dreamhack.se:9272'],\n }],\n }\n scrape_configs.append(vcenter)\n\n # Make sure that all metrics have a host label.\n # This rule uses the existing host label if there is one,\n # stripping of the port (which shouldn't be part of the host label anyway)\n # *or* if that label does not exist it uses the instance label\n # (again stripping of the port)\n relabel = {\n 'regex': r':?([^:]*):?.*',\n 'separator': ':',\n 'replacement': '${1}',\n 'source_labels': ['host', 'instance'],\n 'target_label': 'host',\n }\n\n mrc = 'metric_relabel_configs'\n for scrape in scrape_configs:\n if mrc in scrape:\n scrape[mrc].append(relabel)\n else:\n scrape[mrc] = [relabel]\n return {'scrape_configs': scrape_configs}\n\n\ndef requires(host, *args):\n return ['apache(ldap)']\n\n\ndef generate(host, *args):\n\n info = {}\n\n local_targets = []\n local_targets.append({\n 'job_name': 'prometheus',\n 'scheme': 'http',\n 'static_configs': [{'targets': ['localhost:9090']}]})\n info['prometheus'] = generate_backend(host, local_targets)\n\n # Get current event\n info['prometheus']['current_event'] = lib.get_current_event()\n\n return info\n\n# vim: ts=4: sts=4: sw=4: expandtab\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
print('test 123123')
|
normal
|
{
"blob_id": "c6d8b9faa610e817c449eee94d73c61cb62fa272",
"index": 8878,
"step-1": "<mask token>\n",
"step-2": "print('test 123123')\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import os, random, string
from django.conf import settings
from django.template.loader import render_to_string
from django.core.mail import send_mail
def generate_temp_password():
length = 7
chars = string.ascii_letters + string.digits
rnd = random.SystemRandom()
return ''.join(rnd.choice(chars) for i in range(length))
def send_confirmation_email(user):
#Bug in simple_email_confirmation: refer to https://github.com/mfogel/django-simple-email-confirmation/issues/22
try:
confirmation_key = user.confirmation_key
except:
confirmation_key = user.add_unconfirmed_email(user.email)
msg_txt=render_to_string('email/confirmation.txt', {'SITE_URL': settings.SITE_URL, 'user': user.email, 'key' : confirmation_key})
msg_html = render_to_string('email/confirmation.html', {'SITE_URL': settings.SITE_URL, 'user': user.email, 'key' : confirmation_key})
return send_mail('Confirmation email',msg_txt,'[email protected]',[user.email],html_message=msg_html,)
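
# A usage sketch (the caller below is hypothetical; assumes a configured
# Django project with django-simple-email-confirmation installed):
def register_user(email):
    from django.contrib.auth import get_user_model  # deferred: needs settings
    user = get_user_model().objects.create_user(
        username=email, email=email, password=generate_temp_password())
    send_confirmation_email(user)
    return user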
|
normal
|
{
"blob_id": "822fc2941099cb9d7791580678cfb2a89a987175",
"index": 4685,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef send_confirmation_email(user):\n try:\n confirmation_key = user.confirmation_key\n except:\n confirmation_key = user.add_unconfirmed_email(user.email)\n msg_txt = render_to_string('email/confirmation.txt', {'SITE_URL':\n settings.SITE_URL, 'user': user.email, 'key': confirmation_key})\n msg_html = render_to_string('email/confirmation.html', {'SITE_URL':\n settings.SITE_URL, 'user': user.email, 'key': confirmation_key})\n return send_mail('Confirmation email', msg_txt,\n '[email protected]', [user.email], html_message=msg_html)\n",
"step-3": "<mask token>\n\n\ndef generate_temp_password():\n length = 7\n chars = string.ascii_letters + string.digits\n rnd = random.SystemRandom()\n return ''.join(rnd.choice(chars) for i in range(length))\n\n\ndef send_confirmation_email(user):\n try:\n confirmation_key = user.confirmation_key\n except:\n confirmation_key = user.add_unconfirmed_email(user.email)\n msg_txt = render_to_string('email/confirmation.txt', {'SITE_URL':\n settings.SITE_URL, 'user': user.email, 'key': confirmation_key})\n msg_html = render_to_string('email/confirmation.html', {'SITE_URL':\n settings.SITE_URL, 'user': user.email, 'key': confirmation_key})\n return send_mail('Confirmation email', msg_txt,\n '[email protected]', [user.email], html_message=msg_html)\n",
"step-4": "import os, random, string\nfrom django.conf import settings\nfrom django.template.loader import render_to_string\nfrom django.core.mail import send_mail\n\n\ndef generate_temp_password():\n length = 7\n chars = string.ascii_letters + string.digits\n rnd = random.SystemRandom()\n return ''.join(rnd.choice(chars) for i in range(length))\n\n\ndef send_confirmation_email(user):\n try:\n confirmation_key = user.confirmation_key\n except:\n confirmation_key = user.add_unconfirmed_email(user.email)\n msg_txt = render_to_string('email/confirmation.txt', {'SITE_URL':\n settings.SITE_URL, 'user': user.email, 'key': confirmation_key})\n msg_html = render_to_string('email/confirmation.html', {'SITE_URL':\n settings.SITE_URL, 'user': user.email, 'key': confirmation_key})\n return send_mail('Confirmation email', msg_txt,\n '[email protected]', [user.email], html_message=msg_html)\n",
"step-5": "import os, random, string\nfrom django.conf import settings\nfrom django.template.loader import render_to_string\nfrom django.core.mail import send_mail\n\ndef generate_temp_password(): \n length = 7\n chars = string.ascii_letters + string.digits\n rnd = random.SystemRandom()\n return ''.join(rnd.choice(chars) for i in range(length))\n\ndef send_confirmation_email(user):\n #Bug in simple_email_confirmation: refer to https://github.com/mfogel/django-simple-email-confirmation/issues/22\n try: \n confirmation_key = user.confirmation_key\n except:\n confirmation_key = user.add_unconfirmed_email(user.email)\n msg_txt=render_to_string('email/confirmation.txt', {'SITE_URL': settings.SITE_URL, 'user': user.email, 'key' : confirmation_key})\n msg_html = render_to_string('email/confirmation.html', {'SITE_URL': settings.SITE_URL, 'user': user.email, 'key' : confirmation_key})\n return send_mail('Confirmation email',msg_txt,'[email protected]',[user.email],html_message=msg_html,)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
ulang = 'y'
while ulang == 'y':  # stop once the user answers anything other than 'y'
    a = int(input("masukkan nilai = "))  # "enter the score"

    if a > 60:
        status = "LULUS"        # pass
    else:
        status = "TIDAK LULUS"  # fail
    print(status)

    ulang = input("apakah anda ingin mengulang? y/n = ")  # "repeat? y/n"
|
normal
|
{
"blob_id": "759b440bf436afbfb081cf55eeb4a0f075ed3e6d",
"index": 9577,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile True:\n a = int(input('masukkan nilai = '))\n if a > 60:\n status = 'LULUS'\n elif a <= 60:\n status = 'TIDAK LULUS'\n print(status)\n ulang = input('apakah anda ingin mengulang? y/n = ')\n",
"step-3": "ulang = 'y'\nwhile True:\n a = int(input('masukkan nilai = '))\n if a > 60:\n status = 'LULUS'\n elif a <= 60:\n status = 'TIDAK LULUS'\n print(status)\n ulang = input('apakah anda ingin mengulang? y/n = ')\n",
"step-4": "ulang = 'y'\r\nwhile True :\r\n\ta = int(input (\"masukkan nilai = \"))\r\n\r\n\tif a > 60 :\r\n\t\tstatus = \"LULUS\"\r\n\telif a <= 60 :\r\n\t\tstatus = \"TIDAK LULUS\"\r\n\tprint(status)\r\n\r\n\tulang = input(\"apakah anda ingin mengulang? y/n = \")",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
{'ivy': {'svm': ({'kernel': 'rbf', 'C': 10.0}, 0.034482758620689662, 0.035087719298245612), 'tuned_ensemble': ({'svm__C': 100000.0, 'rf__n_estimators': 101, 'cart__min_samples_leaf': 7, 'knn__n_neighbors': 2, 'rf__random_state': 1542, 'cart__max_depth': 33, 'cart__max_features': 0.35714285714285721, 'svm__kernel': 'sigmoid', 'rf__max_leaf_nodes': 2, 'rf__min_samples_split': 11, 'cart__random_state': 1542, 'nb__priors': None, 'knn__weights': 'uniform', 'rf__min_samples_leaf': 16, 'rf__max_features': 0.43979591836734699, 'cart__min_samples_split': 18}, 0.28915662650602408, 0.34146341463414637), 'nb': ({'priors': None}, 0.3529411764705882, 0.3529411764705882), 'best_param_ensemble': ({}, 0.28915662650602408, 0.2988505747126437), 'rf': ({'min_samples_split': 17, 'min_samples_leaf': 1, 'n_estimators': 61, 'random_state': 1542, 'max_leaf_nodes': 46, 'max_features': 0.94489795918367347}, 0.27083333333333337, 0.38095238095238099), 'cart': ({'max_depth': 50, 'random_state': 1542, 'max_features': 0.19183673469387758, 'min_samples_split': 13, 'min_samples_leaf': 5}, 0.31192660550458717, 0.2105263157894737), 'knn': ({'n_neighbors': 8, 'weights': 'uniform'}, 0.23529411764705882, 0.23749999999999996)}}
|
normal
|
{
"blob_id": "fa02fb701b59728671a7e87147adaeb33422dcdb",
"index": 1600,
"step-1": "<mask token>\n",
"step-2": "{'ivy': {'svm': ({'kernel': 'rbf', 'C': 10.0}, 0.03448275862068966, \n 0.03508771929824561), 'tuned_ensemble': ({'svm__C': 100000.0,\n 'rf__n_estimators': 101, 'cart__min_samples_leaf': 7,\n 'knn__n_neighbors': 2, 'rf__random_state': 1542, 'cart__max_depth': 33,\n 'cart__max_features': 0.3571428571428572, 'svm__kernel': 'sigmoid',\n 'rf__max_leaf_nodes': 2, 'rf__min_samples_split': 11,\n 'cart__random_state': 1542, 'nb__priors': None, 'knn__weights':\n 'uniform', 'rf__min_samples_leaf': 16, 'rf__max_features': \n 0.439795918367347, 'cart__min_samples_split': 18}, 0.2891566265060241, \n 0.34146341463414637), 'nb': ({'priors': None}, 0.3529411764705882, \n 0.3529411764705882), 'best_param_ensemble': ({}, 0.2891566265060241, \n 0.2988505747126437), 'rf': ({'min_samples_split': 17,\n 'min_samples_leaf': 1, 'n_estimators': 61, 'random_state': 1542,\n 'max_leaf_nodes': 46, 'max_features': 0.9448979591836735}, \n 0.27083333333333337, 0.380952380952381), 'cart': ({'max_depth': 50,\n 'random_state': 1542, 'max_features': 0.19183673469387758,\n 'min_samples_split': 13, 'min_samples_leaf': 5}, 0.3119266055045872, \n 0.2105263157894737), 'knn': ({'n_neighbors': 8, 'weights': 'uniform'}, \n 0.23529411764705882, 0.23749999999999996)}}\n",
"step-3": "{'ivy': {'svm': ({'kernel': 'rbf', 'C': 10.0}, 0.034482758620689662, 0.035087719298245612), 'tuned_ensemble': ({'svm__C': 100000.0, 'rf__n_estimators': 101, 'cart__min_samples_leaf': 7, 'knn__n_neighbors': 2, 'rf__random_state': 1542, 'cart__max_depth': 33, 'cart__max_features': 0.35714285714285721, 'svm__kernel': 'sigmoid', 'rf__max_leaf_nodes': 2, 'rf__min_samples_split': 11, 'cart__random_state': 1542, 'nb__priors': None, 'knn__weights': 'uniform', 'rf__min_samples_leaf': 16, 'rf__max_features': 0.43979591836734699, 'cart__min_samples_split': 18}, 0.28915662650602408, 0.34146341463414637), 'nb': ({'priors': None}, 0.3529411764705882, 0.3529411764705882), 'best_param_ensemble': ({}, 0.28915662650602408, 0.2988505747126437), 'rf': ({'min_samples_split': 17, 'min_samples_leaf': 1, 'n_estimators': 61, 'random_state': 1542, 'max_leaf_nodes': 46, 'max_features': 0.94489795918367347}, 0.27083333333333337, 0.38095238095238099), 'cart': ({'max_depth': 50, 'random_state': 1542, 'max_features': 0.19183673469387758, 'min_samples_split': 13, 'min_samples_leaf': 5}, 0.31192660550458717, 0.2105263157894737), 'knn': ({'n_neighbors': 8, 'weights': 'uniform'}, 0.23529411764705882, 0.23749999999999996)}}",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# -*- coding: utf-8 -*-
import tensorflow as tf
from yolov3 import *
from predict import predict
from load import Weight_loader
class Yolo(Yolov3):
sess = tf.Session()
def __init__(self, input=None, weight_path=None, is_training=False):
self.is_training = is_training
try:
self.defrost()
self.input = tf.get_default_graph().get_tensor_by_name('import/input:0')
self.output = tf.get_default_graph().get_tensor_by_name('import/detections/output:0')
except:
if not input:
input = tf.placeholder(tf.float32, [None, 416, 416, 3], 'input')
self.input = input
self.input_size = self.input.get_shape().as_list()[1]
with tf.variable_scope('detections'):
self.output = self.graph()
self.loader = Weight_loader(tf.global_variables('detections'), weight_path)
# self.sess.run(tf.global_variables_initializer())
self.sess.run(self.loader.load_now())
self.freeze()
def predict(self, input_list, confidence_theshold=.6, iou_threshold=.5):
feed_dict = {self.input: input_list}
batch_detections = self.sess.run(self.output, feed_dict)
return predict(batch_detections, confidence_theshold, iou_threshold)
def freeze(self):
graph_def = tf.graph_util.convert_variables_to_constants(sess=self.sess,
input_graph_def=tf.get_default_graph().as_graph_def(),
output_node_names=['detections/output'])
with tf.gfile.GFile('frozen_yolo.pb', 'wb') as f:
f.write(graph_def.SerializeToString())
def defrost(self):
with tf.gfile.GFile('frozen_yolo.pb', 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
print('Found a frozen yolov3 model, defrost and use!')
tf.import_graph_def(graph_def)
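
# Usage sketch (the weight file and dummy batch are assumptions, not part of
# this module): the first run freezes the graph to frozen_yolo.pb; later runs
# defrost it instead of re-parsing the Darknet weights.
if __name__ == '__main__':
    import numpy as np
    yolo = Yolo(weight_path='yolov3.weights')             # assumed weights file
    batch = np.zeros((1, 416, 416, 3), dtype=np.float32)  # one blank 416x416 image
    print(yolo.predict(batch, confidence_theshold=.6, iou_threshold=.5))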
|
normal
|
{
"blob_id": "f3d34379cc7fbfe211eeebec424112f3da0ab724",
"index": 7999,
"step-1": "<mask token>\n\n\nclass Yolo(Yolov3):\n <mask token>\n <mask token>\n <mask token>\n\n def freeze(self):\n graph_def = tf.graph_util.convert_variables_to_constants(sess=self.\n sess, input_graph_def=tf.get_default_graph().as_graph_def(),\n output_node_names=['detections/output'])\n with tf.gfile.GFile('frozen_yolo.pb', 'wb') as f:\n f.write(graph_def.SerializeToString())\n\n def defrost(self):\n with tf.gfile.GFile('frozen_yolo.pb', 'rb') as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n print('Found a frozen yolov3 model, defrost and use!')\n tf.import_graph_def(graph_def)\n",
"step-2": "<mask token>\n\n\nclass Yolo(Yolov3):\n <mask token>\n <mask token>\n\n def predict(self, input_list, confidence_theshold=0.6, iou_threshold=0.5):\n feed_dict = {self.input: input_list}\n batch_detections = self.sess.run(self.output, feed_dict)\n return predict(batch_detections, confidence_theshold, iou_threshold)\n\n def freeze(self):\n graph_def = tf.graph_util.convert_variables_to_constants(sess=self.\n sess, input_graph_def=tf.get_default_graph().as_graph_def(),\n output_node_names=['detections/output'])\n with tf.gfile.GFile('frozen_yolo.pb', 'wb') as f:\n f.write(graph_def.SerializeToString())\n\n def defrost(self):\n with tf.gfile.GFile('frozen_yolo.pb', 'rb') as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n print('Found a frozen yolov3 model, defrost and use!')\n tf.import_graph_def(graph_def)\n",
"step-3": "<mask token>\n\n\nclass Yolo(Yolov3):\n sess = tf.Session()\n\n def __init__(self, input=None, weight_path=None, is_training=False):\n self.is_training = is_training\n try:\n self.defrost()\n self.input = tf.get_default_graph().get_tensor_by_name(\n 'import/input:0')\n self.output = tf.get_default_graph().get_tensor_by_name(\n 'import/detections/output:0')\n except:\n if not input:\n input = tf.placeholder(tf.float32, [None, 416, 416, 3], 'input'\n )\n self.input = input\n self.input_size = self.input.get_shape().as_list()[1]\n with tf.variable_scope('detections'):\n self.output = self.graph()\n self.loader = Weight_loader(tf.global_variables('detections'),\n weight_path)\n self.sess.run(self.loader.load_now())\n self.freeze()\n\n def predict(self, input_list, confidence_theshold=0.6, iou_threshold=0.5):\n feed_dict = {self.input: input_list}\n batch_detections = self.sess.run(self.output, feed_dict)\n return predict(batch_detections, confidence_theshold, iou_threshold)\n\n def freeze(self):\n graph_def = tf.graph_util.convert_variables_to_constants(sess=self.\n sess, input_graph_def=tf.get_default_graph().as_graph_def(),\n output_node_names=['detections/output'])\n with tf.gfile.GFile('frozen_yolo.pb', 'wb') as f:\n f.write(graph_def.SerializeToString())\n\n def defrost(self):\n with tf.gfile.GFile('frozen_yolo.pb', 'rb') as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n print('Found a frozen yolov3 model, defrost and use!')\n tf.import_graph_def(graph_def)\n",
"step-4": "import tensorflow as tf\nfrom yolov3 import *\nfrom predict import predict\nfrom load import Weight_loader\n\n\nclass Yolo(Yolov3):\n sess = tf.Session()\n\n def __init__(self, input=None, weight_path=None, is_training=False):\n self.is_training = is_training\n try:\n self.defrost()\n self.input = tf.get_default_graph().get_tensor_by_name(\n 'import/input:0')\n self.output = tf.get_default_graph().get_tensor_by_name(\n 'import/detections/output:0')\n except:\n if not input:\n input = tf.placeholder(tf.float32, [None, 416, 416, 3], 'input'\n )\n self.input = input\n self.input_size = self.input.get_shape().as_list()[1]\n with tf.variable_scope('detections'):\n self.output = self.graph()\n self.loader = Weight_loader(tf.global_variables('detections'),\n weight_path)\n self.sess.run(self.loader.load_now())\n self.freeze()\n\n def predict(self, input_list, confidence_theshold=0.6, iou_threshold=0.5):\n feed_dict = {self.input: input_list}\n batch_detections = self.sess.run(self.output, feed_dict)\n return predict(batch_detections, confidence_theshold, iou_threshold)\n\n def freeze(self):\n graph_def = tf.graph_util.convert_variables_to_constants(sess=self.\n sess, input_graph_def=tf.get_default_graph().as_graph_def(),\n output_node_names=['detections/output'])\n with tf.gfile.GFile('frozen_yolo.pb', 'wb') as f:\n f.write(graph_def.SerializeToString())\n\n def defrost(self):\n with tf.gfile.GFile('frozen_yolo.pb', 'rb') as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n print('Found a frozen yolov3 model, defrost and use!')\n tf.import_graph_def(graph_def)\n",
"step-5": "# -*- coding: utf-8 -*-\nimport tensorflow as tf\nfrom yolov3 import *\nfrom predict import predict\nfrom load import Weight_loader\n\nclass Yolo(Yolov3):\n\n sess = tf.Session()\n \n def __init__(self, input=None, weight_path=None, is_training=False):\n self.is_training = is_training\n try:\n self.defrost()\n self.input = tf.get_default_graph().get_tensor_by_name('import/input:0')\n self.output = tf.get_default_graph().get_tensor_by_name('import/detections/output:0')\n except:\n if not input:\n input = tf.placeholder(tf.float32, [None, 416, 416, 3], 'input')\n self.input = input\n self.input_size = self.input.get_shape().as_list()[1]\n with tf.variable_scope('detections'):\n self.output = self.graph() \n self.loader = Weight_loader(tf.global_variables('detections'), weight_path)\n # self.sess.run(tf.global_variables_initializer())\n self.sess.run(self.loader.load_now())\n self.freeze()\n\n def predict(self, input_list, confidence_theshold=.6, iou_threshold=.5):\n feed_dict = {self.input: input_list}\n batch_detections = self.sess.run(self.output, feed_dict)\n return predict(batch_detections, confidence_theshold, iou_threshold)\n\n def freeze(self):\n graph_def = tf.graph_util.convert_variables_to_constants(sess=self.sess,\n input_graph_def=tf.get_default_graph().as_graph_def(),\n output_node_names=['detections/output'])\n with tf.gfile.GFile('frozen_yolo.pb', 'wb') as f:\n f.write(graph_def.SerializeToString())\n\n def defrost(self):\n with tf.gfile.GFile('frozen_yolo.pb', 'rb') as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n print('Found a frozen yolov3 model, defrost and use!') \n tf.import_graph_def(graph_def)\n",
"step-ids": [
3,
4,
6,
7,
8
]
}
|
[
3,
4,
6,
7,
8
] |
def merge_the_tools(string, k):
    if len(string) % k != 0:
        exit()
    else:
        L = []
        for i in range(0, len(string), k):
            # dict.fromkeys keeps the first occurrence of each char, preserving order
            L.append(''.join(dict.fromkeys(string[i:i + k])))
        print('\n'.join(L))
if __name__ == '__main__':
string, k = input(), int(input())
merge_the_tools(string, k)
# S, N = input(), int(input())
# for part in zip(*[iter(S)] * N):
# asterisk unpacks a list. Example: print(*[1,2,3,4]) = print(1,2,3,4)
# [iter(s)]*n makes a list of n times the same iterator for s.
# Example: [[iter(s)]*3] = ([iter(s), iter(s), iter(s)])
# if s = 'abcdefghi', then zip(*[iter(s)]*3) will have the following effect:
# a,b,c,d,e,f,g,h,i a,b,c,d,e,f,g,h,i a,b,c,d,e,f,g,h,i
# ^ ^ ^
# ^ ^ ^
# ^ ^ ^
# d = dict()
# print(''.join([ d.setdefault(c, c) for c in part if c not in d ]))
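#
# A quick demonstration of the zip(*[iter(s)] * n) trick described above
# (doctest-style, so it stays inert when this file is executed):
#
#   >>> S, N = 'abcdefghi', 3
#   >>> for part in zip(*[iter(S)] * N):
#   ...     print(''.join(part))
#   abc
#   def
#   ghi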
|
normal
|
{
"blob_id": "0004e90622f8b13ec7ce0c1f49e8c8df7ea07269",
"index": 7098,
"step-1": "<mask token>\n",
"step-2": "def merge_the_tools(string, k):\n if len(string) % k != 0:\n exit()\n else:\n L = []\n for i in range(0, len(string), k):\n L.append(''.join(list(dict.fromkeys(string[i:i + k]))))\n print('\\n'.join(L))\n\n\n<mask token>\n",
"step-3": "def merge_the_tools(string, k):\n if len(string) % k != 0:\n exit()\n else:\n L = []\n for i in range(0, len(string), k):\n L.append(''.join(list(dict.fromkeys(string[i:i + k]))))\n print('\\n'.join(L))\n\n\nif __name__ == '__main__':\n string, k = input(), int(input())\n merge_the_tools(string, k)\n",
"step-4": "def merge_the_tools(string, k):\n if(len(string)%k != 0):\n exit()\n else:\n L = []\n for i in range(0, len(string), k):\n L.append(''.join(list(dict.fromkeys(string[i:i+k]))))\n print('\\n'.join(L))\n\nif __name__ == '__main__':\n\n string, k = input(), int(input())\n merge_the_tools(string, k)\n\n\n# S, N = input(), int(input())\n# for part in zip(*[iter(S)] * N):\n# asterisk unpacks a list. Example: print(*[1,2,3,4]) = print(1,2,3,4)\n# [iter(s)]*n makes a list of n times the same iterator for s.\n# Example: [[iter(s)]*3] = ([iter(s), iter(s), iter(s)])\n# if s = 'abcdefghi', then zip(*[iter(s)]*3) will have the following effect:\n# a,b,c,d,e,f,g,h,i a,b,c,d,e,f,g,h,i a,b,c,d,e,f,g,h,i\n# ^ ^ ^\n# ^ ^ ^\n# ^ ^ ^\n# d = dict()\n# print(''.join([ d.setdefault(c, c) for c in part if c not in d ]))\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# ---------------------MODULE 1 notes--------------------
# .
# .
# .
# .
# .
# .
# .
# .
# .
# .
# save this file with a .py extension first, otherwise it will not run
print("Hello")
# control s to save
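
# to run it, open a terminal in the folder where you saved it and type
# (assuming you named the file hello.py):
#   python hello.py
# it should print: Hello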
|
normal
|
{
"blob_id": "bb64da929ff2e1e04267518ec93a28bedb5a4de5",
"index": 7306,
"step-1": "<mask token>\n",
"step-2": "print('Hello')\n",
"step-3": "# ---------------------MODULE 1 notes--------------------\r\n# .\r\n# .\r\n# .\r\n# .\r\n# .\r\n# .\r\n# .\r\n# .\r\n# .\r\n# .\r\n\r\n# save as (file).py first if not it will not work\r\nprint(\"Hello\")\r\n\r\n# control s to save\r\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pandas as pd
file = pd.read_csv("KDDTest+.csv")
with open("test_9feats.csv", "w") as f:
df = pd.DataFrame(file,
columns=[
"dst_host_srv_serror_rate", "dst_host_serror_rate",
"serror_rate", "srv_serror_rate", "count", "flag",
"same_srv_rate", "dst_host_srv_count",
"dst_host_diff_srv_rate", "Malicious"
])
df.to_csv(f, index=False, header=True, line_terminator='\n')
print(df)
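
# quick sanity check (a sketch; re-reads the file we just wrote)
check = pd.read_csv("test_9feats.csv")
assert check.shape[1] == 10  # 9 selected features + the "Malicious" label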
|
normal
|
{
"blob_id": "ce28330db66dcdfad63bdac698ce9d285964d288",
"index": 5124,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('test_9feats.csv', 'w') as f:\n df = pd.DataFrame(file, columns=['dst_host_srv_serror_rate',\n 'dst_host_serror_rate', 'serror_rate', 'srv_serror_rate', 'count',\n 'flag', 'same_srv_rate', 'dst_host_srv_count',\n 'dst_host_diff_srv_rate', 'Malicious'])\n df.to_csv(f, index=False, header=True, line_terminator='\\n')\n print(df)\n",
"step-3": "<mask token>\nfile = pd.read_csv('KDDTest+.csv')\nwith open('test_9feats.csv', 'w') as f:\n df = pd.DataFrame(file, columns=['dst_host_srv_serror_rate',\n 'dst_host_serror_rate', 'serror_rate', 'srv_serror_rate', 'count',\n 'flag', 'same_srv_rate', 'dst_host_srv_count',\n 'dst_host_diff_srv_rate', 'Malicious'])\n df.to_csv(f, index=False, header=True, line_terminator='\\n')\n print(df)\n",
"step-4": "import pandas as pd\nfile = pd.read_csv('KDDTest+.csv')\nwith open('test_9feats.csv', 'w') as f:\n df = pd.DataFrame(file, columns=['dst_host_srv_serror_rate',\n 'dst_host_serror_rate', 'serror_rate', 'srv_serror_rate', 'count',\n 'flag', 'same_srv_rate', 'dst_host_srv_count',\n 'dst_host_diff_srv_rate', 'Malicious'])\n df.to_csv(f, index=False, header=True, line_terminator='\\n')\n print(df)\n",
"step-5": "import pandas as pd\n\nfile = pd.read_csv(\"KDDTest+.csv\")\nwith open(\"test_9feats.csv\", \"w\") as f:\n df = pd.DataFrame(file,\n columns=[\n \"dst_host_srv_serror_rate\", \"dst_host_serror_rate\",\n \"serror_rate\", \"srv_serror_rate\", \"count\", \"flag\",\n \"same_srv_rate\", \"dst_host_srv_count\",\n \"dst_host_diff_srv_rate\", \"Malicious\"\n ])\n df.to_csv(f, index=False, header=True, line_terminator='\\n')\n print(df)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.db.models import Sum, Count
from django.db.models.functions import Coalesce
from django.utils.timezone import localtime
from .models import Quote, Vote
import pygal
from pygal.style import Style
style = Style(
background='transparent',
plot_background='transparent',
foreground='#3d3d3d',
foreground_strong='#303030',
foreground_subtle='#939393',
opacity='.8',
opacity_hover='.9',
colors=('#fa5555', '#888'),
label_font_size=15,
major_label_font_size=15,
title_font_size=20,
legend_font_size=15
)
MONTHS = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')
quotes = Quote.objects.annotate(score=Coalesce(Sum('vote__value'), 0), votes=Count('vote')).filter(approved=True)
votes = Vote.objects
class QuotesOverTime():
def __init__(self):
self.chart = pygal.DateTimeLine(
title='Quotes over Time',
x_label_rotation=90,
x_value_formatter=lambda dt: dt.strftime('%b %Y'),
margin=20,
show_legend=False,
show_dots=False,
fill=True,
style=style
)
def pull(self):
data = {}
for quote in quotes.order_by('timestamp'):
timestamp = quote.timestamp.timestamp()
data[timestamp] = data.get(timestamp, 0)
data[timestamp] += 1
return data
def generate(self):
data = self.pull()
points = []
total = 0
for key, value in data.items():
points.append((key, total))
total += value
self.chart.add('quotes', points)
return self.chart.render(is_unicode=True)
class QuotesByHour():
def __init__(self):
self.chart = pygal.Bar(
title='Quotes by Hour',
            x_labels=list(map(str, range(24))),
margin=20,
show_legend=False,
style=style
)
def pull(self):
data = [0 for _ in range(24)]
for quote in quotes:
data[localtime(quote.timestamp).hour] += 1
return data
def generate(self):
data = self.pull()
self.chart.add('quotes', data)
return self.chart.render(is_unicode=True)
class QuotesByMonth():
def __init__(self):
self.chart = pygal.Bar(
title='Quotes by Month',
            x_labels=MONTHS,
margin=20,
show_legend=False,
style=style
)
def pull(self):
data = [0 for _ in range(12)]
for quote in quotes:
            data[localtime(quote.timestamp).month - 1] += 1
return data
def generate(self):
data = self.pull()
self.chart.add('quotes', data)
return self.chart.render(is_unicode=True)
class QuotesByRating():
def __init__(self):
self.chart = pygal.Histogram(
title='Quotes by Rating',
margin=20,
show_legend=False,
style=style
)
def pull(self):
data = {}
for quote in quotes:
data[quote.score] = data.get(quote.score, 0)
data[quote.score] += 1
return data
def generate(self):
data = self.pull()
bars = []
for key, value in data.items():
bars.append((value, key, key+1))
self.chart.add('quotes', bars)
return self.chart.render(is_unicode=True)
class VoteDistribution():
def __init__(self):
self.chart = pygal.Pie(
title='Vote Distribution',
margin=20,
inner_radius=.7,
style=style
)
def pull(self):
data = {}
up = votes.filter(value=1).count()
down = votes.filter(value=-1).count()
data['up'] = up
data['down'] = down
return data
def generate(self):
data = self.pull()
for key, value in data.items():
self.chart.add('{} ({})'.format(key, value), value)
return self.chart.render(is_unicode=True)
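
# A usage sketch (the view and template below are hypothetical): each class
# renders an SVG string, so a Django view can hand the charts to a template
# and embed them with the `safe` filter.
def stats(request):
    from django.shortcuts import render  # deferred import for this sketch
    charts = [cls().generate() for cls in (QuotesOverTime, QuotesByHour,
              QuotesByMonth, QuotesByRating, VoteDistribution)]
    return render(request, 'stats.html', {'charts': charts})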
|
normal
|
{
"blob_id": "6f6f57ff317d7e3c6e6ae4d450c6fdf0e22eb4eb",
"index": 7256,
"step-1": "<mask token>\n\n\nclass QuotesByMonth:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass QuotesByRating:\n\n def __init__(self):\n self.chart = pygal.Histogram(title='Quotes by Rating', margin=20,\n show_legend=False, style=style)\n\n def pull(self):\n data = {}\n for quote in quotes:\n data[quote.score] = data.get(quote.score, 0)\n data[quote.score] += 1\n return data\n\n def generate(self):\n data = self.pull()\n bars = []\n for key, value in data.items():\n bars.append((value, key, key + 1))\n self.chart.add('quotes', bars)\n return self.chart.render(is_unicode=True)\n\n\nclass VoteDistribution:\n\n def __init__(self):\n self.chart = pygal.Pie(title='Vote Distribution', margin=20,\n inner_radius=0.7, style=style)\n\n def pull(self):\n data = {}\n up = votes.filter(value=1).count()\n down = votes.filter(value=-1).count()\n data['up'] = up\n data['down'] = down\n return data\n\n def generate(self):\n data = self.pull()\n for key, value in data.items():\n self.chart.add('{} ({})'.format(key, value), value)\n return self.chart.render(is_unicode=True)\n",
"step-2": "<mask token>\n\n\nclass QuotesByHour:\n\n def __init__(self):\n self.chart = pygal.Bar(title='Quotes by Hour', x_labels=list(map(\n str, range(24))), margin=20, show_legend=False, style=style)\n\n def pull(self):\n data = [(0) for _ in range(24)]\n for quote in quotes:\n data[localtime(quote.timestamp).hour] += 1\n return data\n\n def generate(self):\n data = self.pull()\n self.chart.add('quotes', data)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByMonth:\n\n def __init__(self):\n self.chart = pygal.Bar(title='Quotes by Month', x_labels=MONTHS,\n margin=20, show_legend=False, style=style)\n\n def pull(self):\n data = [(0) for _ in range(12)]\n for quote in quotes:\n data[localtime(quote.timestamp).month - 1] += 1\n return data\n\n def generate(self):\n data = self.pull()\n self.chart.add('quotes', data)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByRating:\n\n def __init__(self):\n self.chart = pygal.Histogram(title='Quotes by Rating', margin=20,\n show_legend=False, style=style)\n\n def pull(self):\n data = {}\n for quote in quotes:\n data[quote.score] = data.get(quote.score, 0)\n data[quote.score] += 1\n return data\n\n def generate(self):\n data = self.pull()\n bars = []\n for key, value in data.items():\n bars.append((value, key, key + 1))\n self.chart.add('quotes', bars)\n return self.chart.render(is_unicode=True)\n\n\nclass VoteDistribution:\n\n def __init__(self):\n self.chart = pygal.Pie(title='Vote Distribution', margin=20,\n inner_radius=0.7, style=style)\n\n def pull(self):\n data = {}\n up = votes.filter(value=1).count()\n down = votes.filter(value=-1).count()\n data['up'] = up\n data['down'] = down\n return data\n\n def generate(self):\n data = self.pull()\n for key, value in data.items():\n self.chart.add('{} ({})'.format(key, value), value)\n return self.chart.render(is_unicode=True)\n",
"step-3": "<mask token>\n\n\nclass QuotesOverTime:\n\n def __init__(self):\n self.chart = pygal.DateTimeLine(title='Quotes over Time',\n x_label_rotation=90, x_value_formatter=lambda dt: dt.strftime(\n '%b %Y'), margin=20, show_legend=False, show_dots=False, fill=\n True, style=style)\n\n def pull(self):\n data = {}\n for quote in quotes.order_by('timestamp'):\n timestamp = quote.timestamp.timestamp()\n data[timestamp] = data.get(timestamp, 0)\n data[timestamp] += 1\n return data\n\n def generate(self):\n data = self.pull()\n points = []\n total = 0\n for key, value in data.items():\n points.append((key, total))\n total += value\n self.chart.add('quotes', points)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByHour:\n\n def __init__(self):\n self.chart = pygal.Bar(title='Quotes by Hour', x_labels=list(map(\n str, range(24))), margin=20, show_legend=False, style=style)\n\n def pull(self):\n data = [(0) for _ in range(24)]\n for quote in quotes:\n data[localtime(quote.timestamp).hour] += 1\n return data\n\n def generate(self):\n data = self.pull()\n self.chart.add('quotes', data)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByMonth:\n\n def __init__(self):\n self.chart = pygal.Bar(title='Quotes by Month', x_labels=MONTHS,\n margin=20, show_legend=False, style=style)\n\n def pull(self):\n data = [(0) for _ in range(12)]\n for quote in quotes:\n data[localtime(quote.timestamp).month - 1] += 1\n return data\n\n def generate(self):\n data = self.pull()\n self.chart.add('quotes', data)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByRating:\n\n def __init__(self):\n self.chart = pygal.Histogram(title='Quotes by Rating', margin=20,\n show_legend=False, style=style)\n\n def pull(self):\n data = {}\n for quote in quotes:\n data[quote.score] = data.get(quote.score, 0)\n data[quote.score] += 1\n return data\n\n def generate(self):\n data = self.pull()\n bars = []\n for key, value in data.items():\n bars.append((value, key, key + 1))\n self.chart.add('quotes', bars)\n return self.chart.render(is_unicode=True)\n\n\nclass VoteDistribution:\n\n def __init__(self):\n self.chart = pygal.Pie(title='Vote Distribution', margin=20,\n inner_radius=0.7, style=style)\n\n def pull(self):\n data = {}\n up = votes.filter(value=1).count()\n down = votes.filter(value=-1).count()\n data['up'] = up\n data['down'] = down\n return data\n\n def generate(self):\n data = self.pull()\n for key, value in data.items():\n self.chart.add('{} ({})'.format(key, value), value)\n return self.chart.render(is_unicode=True)\n",
"step-4": "<mask token>\nstyle = Style(background='transparent', plot_background='transparent',\n foreground='#3d3d3d', foreground_strong='#303030', foreground_subtle=\n '#939393', opacity='.8', opacity_hover='.9', colors=('#fa5555', '#888'),\n label_font_size=15, major_label_font_size=15, title_font_size=20,\n legend_font_size=15)\nMONTHS = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',\n 'Oct', 'Nov', 'Dec')\nquotes = Quote.objects.annotate(score=Coalesce(Sum('vote__value'), 0),\n votes=Count('vote')).filter(approved=True)\nvotes = Vote.objects\n\n\nclass QuotesOverTime:\n\n def __init__(self):\n self.chart = pygal.DateTimeLine(title='Quotes over Time',\n x_label_rotation=90, x_value_formatter=lambda dt: dt.strftime(\n '%b %Y'), margin=20, show_legend=False, show_dots=False, fill=\n True, style=style)\n\n def pull(self):\n data = {}\n for quote in quotes.order_by('timestamp'):\n timestamp = quote.timestamp.timestamp()\n data[timestamp] = data.get(timestamp, 0)\n data[timestamp] += 1\n return data\n\n def generate(self):\n data = self.pull()\n points = []\n total = 0\n for key, value in data.items():\n points.append((key, total))\n total += value\n self.chart.add('quotes', points)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByHour:\n\n def __init__(self):\n self.chart = pygal.Bar(title='Quotes by Hour', x_labels=list(map(\n str, range(24))), margin=20, show_legend=False, style=style)\n\n def pull(self):\n data = [(0) for _ in range(24)]\n for quote in quotes:\n data[localtime(quote.timestamp).hour] += 1\n return data\n\n def generate(self):\n data = self.pull()\n self.chart.add('quotes', data)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByMonth:\n\n def __init__(self):\n self.chart = pygal.Bar(title='Quotes by Month', x_labels=MONTHS,\n margin=20, show_legend=False, style=style)\n\n def pull(self):\n data = [(0) for _ in range(12)]\n for quote in quotes:\n data[localtime(quote.timestamp).month - 1] += 1\n return data\n\n def generate(self):\n data = self.pull()\n self.chart.add('quotes', data)\n return self.chart.render(is_unicode=True)\n\n\nclass QuotesByRating:\n\n def __init__(self):\n self.chart = pygal.Histogram(title='Quotes by Rating', margin=20,\n show_legend=False, style=style)\n\n def pull(self):\n data = {}\n for quote in quotes:\n data[quote.score] = data.get(quote.score, 0)\n data[quote.score] += 1\n return data\n\n def generate(self):\n data = self.pull()\n bars = []\n for key, value in data.items():\n bars.append((value, key, key + 1))\n self.chart.add('quotes', bars)\n return self.chart.render(is_unicode=True)\n\n\nclass VoteDistribution:\n\n def __init__(self):\n self.chart = pygal.Pie(title='Vote Distribution', margin=20,\n inner_radius=0.7, style=style)\n\n def pull(self):\n data = {}\n up = votes.filter(value=1).count()\n down = votes.filter(value=-1).count()\n data['up'] = up\n data['down'] = down\n return data\n\n def generate(self):\n data = self.pull()\n for key, value in data.items():\n self.chart.add('{} ({})'.format(key, value), value)\n return self.chart.render(is_unicode=True)\n",
"step-5": "from django.db.models import Sum, Count\nfrom django.db.models.functions import Coalesce\nfrom django.utils.timezone import localtime\n\nfrom .models import Quote, Vote\n\nimport pygal\n\nfrom pygal.style import Style\nstyle = Style(\n\tbackground='transparent',\n\tplot_background='transparent',\n\tforeground='#3d3d3d',\n\tforeground_strong='#303030',\n\tforeground_subtle='#939393',\n\topacity='.8',\n\topacity_hover='.9',\n\tcolors=('#fa5555', '#888'),\n\tlabel_font_size=15,\n\tmajor_label_font_size=15,\n\ttitle_font_size=20,\n\tlegend_font_size=15\n)\n\nMONTHS = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')\n\nquotes = Quote.objects.annotate(score=Coalesce(Sum('vote__value'), 0), votes=Count('vote')).filter(approved=True)\nvotes = Vote.objects\n\nclass QuotesOverTime():\n\n\tdef __init__(self):\n\t\tself.chart = pygal.DateTimeLine(\n\t\t\ttitle='Quotes over Time',\n\t\t\tx_label_rotation=90,\n\t\t\tx_value_formatter=lambda dt: dt.strftime('%b %Y'),\n\t\t\tmargin=20,\n\t\t\tshow_legend=False,\n\t\t\tshow_dots=False,\n\t\t\tfill=True,\n\t\t\tstyle=style\n\t\t)\n\n\tdef pull(self):\n\t\tdata = {}\n\t\tfor quote in quotes.order_by('timestamp'):\n\t\t\ttimestamp = quote.timestamp.timestamp()\n\t\t\tdata[timestamp] = data.get(timestamp, 0)\n\t\t\tdata[timestamp] += 1\n\t\treturn data\n\n\tdef generate(self):\n\t\tdata = self.pull()\n\t\tpoints = []\n\t\ttotal = 0\n\t\tfor key, value in data.items():\n\t\t\tpoints.append((key, total))\n\t\t\ttotal += value\n\t\tself.chart.add('quotes', points)\n\t\treturn self.chart.render(is_unicode=True)\n\nclass QuotesByHour():\n\n\tdef __init__(self):\n\t\tself.chart = pygal.Bar(\n\t\t\ttitle='Quotes by Hour',\n\t\t\tx_labels = list(map(str, range(24))),\n\t\t\tmargin=20,\n\t\t\tshow_legend=False,\n\t\t\tstyle=style\n\t\t)\n\n\tdef pull(self):\n\t\tdata = [0 for _ in range(24)]\n\t\tfor quote in quotes:\n\t\t\tdata[localtime(quote.timestamp).hour] += 1\n\t\treturn data\n\n\tdef generate(self):\n\t\tdata = self.pull()\n\t\tself.chart.add('quotes', data)\n\t\treturn self.chart.render(is_unicode=True)\n\nclass QuotesByMonth():\n\n\tdef __init__(self):\n\t\tself.chart = pygal.Bar(\n\t\t\ttitle='Quotes by Month',\n\t\t\tx_labels = MONTHS,\n\t\t\tmargin=20,\n\t\t\tshow_legend=False,\n\t\t\tstyle=style\n\t\t)\n\n\tdef pull(self):\n\t\tdata = [0 for _ in range(12)]\n\t\tfor quote in quotes:\n\t\t\tdata[localtime(quote.timestamp).month-1] += 1\n\t\treturn data\n\n\tdef generate(self):\n\t\tdata = self.pull()\n\t\tself.chart.add('quotes', data)\n\t\treturn self.chart.render(is_unicode=True)\n\nclass QuotesByRating():\n\n\tdef __init__(self):\n\t\tself.chart = pygal.Histogram(\n\t\t\ttitle='Quotes by Rating',\n\t\t\tmargin=20,\n\t\t\tshow_legend=False,\n\t\t\tstyle=style\n\t\t)\n\n\tdef pull(self):\n\t\tdata = {}\n\t\tfor quote in quotes:\n\t\t\tdata[quote.score] = data.get(quote.score, 0)\n\t\t\tdata[quote.score] += 1\n\t\treturn data\n\n\tdef generate(self):\n\t\tdata = self.pull()\n\t\tbars = []\n\t\tfor key, value in data.items():\n\t\t\tbars.append((value, key, key+1))\n\t\tself.chart.add('quotes', bars)\n\t\treturn self.chart.render(is_unicode=True)\n\nclass VoteDistribution():\n\n\tdef __init__(self):\n\t\tself.chart = pygal.Pie(\n\t\t\ttitle='Vote Distribution',\n\t\t\tmargin=20,\n\t\t\tinner_radius=.7,\n\t\t\tstyle=style\n\t\t)\n\n\tdef pull(self):\n\t\tdata = {}\n\t\tup = votes.filter(value=1).count()\n\t\tdown = votes.filter(value=-1).count()\n\t\tdata['up'] = up\n\t\tdata['down'] = down\n\t\treturn 
data\n\n\tdef generate(self):\n\t\tdata = self.pull()\n\t\tfor key, value in data.items():\n\t\t\tself.chart.add('{} ({})'.format(key, value), value)\n\t\treturn self.chart.render(is_unicode=True)\n",
"step-ids": [
9,
16,
20,
21,
23
]
}
|
[
9,
16,
20,
21,
23
] |
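All five chart classes in the record above share the same pull/generate pattern and return an SVG document as a unicode string. A minimal sketch of how they might be wired into a Django view follows; the module path, view name, and template name are assumptions for illustration, not part of the original record.

# Hypothetical Django view wiring for the chart classes above; the module
# path, view name, and template name are assumptions, not from the record.
from django.shortcuts import render

from .charts import (QuotesOverTime, QuotesByHour, QuotesByMonth,
                     QuotesByRating, VoteDistribution)


def stats(request):
    # generate() returns SVG markup (render(is_unicode=True)), which can be
    # embedded in a template with the |safe filter.
    charts = [chart().generate() for chart in
              (QuotesOverTime, QuotesByHour, QuotesByMonth,
               QuotesByRating, VoteDistribution)]
    return render(request, 'stats.html', {'charts': charts})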
import collections
import re
from collections import Counter
import operator
import pickle
import math
import json
path='C:/Users/rahul/Desktop/CSCI 544/HW 2/op_spam_train/'
#path=sys.argv[1]
badWordList = ['and','the','was','for']
RE=r'\b[^\W\d_]+\b'
# NEGATIVE TWEETS
c=collections.Counter()
NT="negativeTweets.txt/"
with open("negativeTweets.txt") as f:
    c.update(word.lower() for line in f for word in re.findall(RE, line)
             if len(word) > 2 and word not in badWordList)
# POSITIVE TWEETS
d=collections.Counter()
PT="positiveTweets.txt/"
with open("positiveTweets.txt") as f:
    d.update(word.lower() for line in f for word in re.findall(RE, line)
             if len(word) > 2 and word not in badWordList)
# Storing Counts in a dictionary nb
dicts = [dict(c), dict(d)]  # index 0: negative counts, index 1: positive counts
nb, cnt = {}, 0
for d in dicts:
    for k, v in d.items():
        if k not in nb:
            nb[k] = [1, 1]  # start at 1 for add-one (Laplace) smoothing
        nb[k][cnt] += v
    cnt += 1
for k, v in nb.items():
    print(k, v)
print(len(nb))

totalClassWord = [0, 0]  # total (smoothed) word count per class
for k, v in nb.items():
    totalClassWord = [x + y for x, y in zip(totalClassWord, v)]

prob = {}
for k, v in nb.items():
    prob[k] = [math.log10(v[0] / float(totalClassWord[0])),
               math.log10(v[1] / float(totalClassWord[1]))]

for k, v in prob.items():
    print(k, v)
#Dumping dictionary as JSON object in file
#with open('hackTechTweetClassificationModel.txt', 'wb') as handle: pickle.dump(prob, handle)
keys=json.dumps(prob, sort_keys=True)
output_file=open('hackTechTweetClassificationModel.txt', 'w')
output_file.write(keys)
output_file.close()
output_file=open('hackTechTweetPHP.php', 'w')
#Format of PHP output
#$result=mysqli_query($con, "INSERT INTO ttrain VALUES ('aaa','-2.232','222.4234')" );
for k,v in prob.items():
strop="$result=mysqli_query($con, \"INSERT INTO ttrain VALUES (\'"+str(k)+"\',\'"+str(v[0])+"\',\'"+str(v[1])+"\')\" );\n"
output_file.write(strop)
output_file.close()
|
normal
|
{
"blob_id": "42e16def0fcf234f3d7c2709de36a321d8ddf29e",
"index": 7598,
"step-1": "import collections\nimport re\nfrom collections import Counter\nimport operator\nimport pickle\nimport math\nimport json\n\npath='C:/Users/rahul/Desktop/CSCI 544/HW 2/op_spam_train/'\n#path=sys.argv[1]\n\nbadWordList = ['and','the','was','for']\nRE=r'\\b[^\\W\\d_]+\\b'\n\n# NEGATIVE TWEETS\nc=collections.Counter()\nNT=\"negativeTweets.txt/\"\nwith open(\"negativeTweets.txt\") as f: \n c.update( word.lower() for line in f for word in re.findall(r'\\b[^\\W\\d_]+\\b', line) if len(word)>2 and word not in badWordList)\n\n# POSITIVE TWEETS\nd=collections.Counter()\nPT=\"positiveTweets.txt/\"\nwith open(\"positiveTweets.txt\") as f: \n d.update( word.lower() for line in f for word in re.findall(r'\\b[^\\W\\d_]+\\b', line) if len(word)>2 and word not in badWordList)\n\n# Storing Counts in a dictionary nb\ndicts=[dict(c),dict(d)] \nnb,cnt={},0\nfor d in dicts:\n for k, v in d.items():\n if(k in nb): nb[k][cnt]= nb[k][cnt]+v\n else: \n nb[k]=[1,1]\n nb[k][cnt]= nb[k][cnt]+v\n cnt=cnt+1\n\nfor k,v in nb.items():\n print k,v\n \nprint len(nb);\n\ntotalClassWord=[0,0]\nfor k, v in nb.items():\n totalClassWord=[x + y for x, y in zip(totalClassWord, v)]\n\nprob={} \nfor k, v in nb.items():\n prob[k]=[0,0]\n prob[k][0]= math.log10( float(nb[k][0])/float(totalClassWord[0]))\n prob[k][1]= math.log10( float(nb[k][1])/float(totalClassWord[1]))\n\nfor k,v in prob.items():\n print k,v\n\n#Dumping dictionary as JSON object in file\n#with open('hackTechTweetClassificationModel.txt', 'wb') as handle: pickle.dump(prob, handle)\nkeys=json.dumps(prob, sort_keys=True)\noutput_file=open('hackTechTweetClassificationModel.txt', 'w')\noutput_file.write(keys)\noutput_file.close()\n\noutput_file=open('hackTechTweetPHP.php', 'w')\n#Format of PHP output\n#$result=mysqli_query($con, \"INSERT INTO ttrain VALUES ('aaa','-2.232','222.4234')\" );\nfor k,v in prob.items():\n strop=\"$result=mysqli_query($con, \\\"INSERT INTO ttrain VALUES (\\'\"+str(k)+\"\\',\\'\"+str(v[0])+\"\\',\\'\"+str(v[1])+\"\\')\\\" );\\n\" \n output_file.write(strop)\noutput_file.close()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
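The training script above writes per-word log10 probabilities to hackTechTweetClassificationModel.txt, with index 0 holding the negative class and index 1 the positive class. A sketch of the matching inference step follows; skipping unseen words and treating the class priors as equal are my assumptions, since the record contains no classification code.

# Sketch of the inference step for the model file written above. Unseen
# words are skipped and class priors treated as equal -- both assumptions,
# since the record contains no inference code.
import json
import re

with open('hackTechTweetClassificationModel.txt') as f:
    prob = json.load(f)  # word -> [log10 P(w|negative), log10 P(w|positive)]

BAD_WORDS = ('and', 'the', 'was', 'for')


def classify(tweet):
    scores = [0.0, 0.0]  # [negative, positive]
    for word in re.findall(r'\b[^\W\d_]+\b', tweet):
        if len(word) > 2 and word not in BAD_WORDS:
            w = word.lower()
            if w in prob:
                scores[0] += prob[w][0]
                scores[1] += prob[w][1]
    return 'negative' if scores[0] > scores[1] else 'positive'


print(classify('the battery died twice and support never replied'))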
"""
Users model
"""
# Django
from django.conf import settings
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.core.validators import RegexValidator
class User(AbstractUser):
"""User model"""
email = models.EmailField(
'email address',
unique=True,
error_messages={
'unique': 'A user with that email already exists'
}
)
phone_regex = RegexValidator(
        regex=r'^\+?1?\d{9,15}$',
message='Phone number must be entered in the right format'
)
phone_number = models.CharField(
validators=[phone_regex],
max_length=17
)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username', 'first_name', 'last_name', 'phone_number']
def __str__(self):
return self.username
class Profile(models.Model):
"""Profile model"""
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, primary_key=True)
description = models.TextField('user description', max_length=255)
picture = models.ImageField(
upload_to='users/pictures',
blank=True,
null=True
)
    is_authenticated = models.BooleanField('user is authenticated', default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return self.user.username
|
normal
|
{
"blob_id": "360813a573f672e3ec380da4237a6e131dbcb7e6",
"index": 2345,
"step-1": "<mask token>\n\n\nclass User(AbstractUser):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Profile(models.Model):\n \"\"\"Profile model\"\"\"\n user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE, primary_key=True)\n description = models.TextField('user description', max_length=255)\n picture = models.ImageField(upload_to='users/pictures', blank=True,\n null=True)\n is_authenticated = models.BooleanField('user is autheticated', default=\n False)\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n",
"step-2": "<mask token>\n\n\nclass User(AbstractUser):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.username\n\n\nclass Profile(models.Model):\n \"\"\"Profile model\"\"\"\n user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE, primary_key=True)\n description = models.TextField('user description', max_length=255)\n picture = models.ImageField(upload_to='users/pictures', blank=True,\n null=True)\n is_authenticated = models.BooleanField('user is autheticated', default=\n False)\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n",
"step-3": "<mask token>\n\n\nclass User(AbstractUser):\n \"\"\"User model\"\"\"\n email = models.EmailField('email address', unique=True, error_messages=\n {'unique': 'A user with that email already exists'})\n phone_regex = RegexValidator(regex='\\\\+?1?\\\\d{9,15}$', message=\n 'Phone number must be entered in the right format')\n phone_number = models.CharField(validators=[phone_regex], max_length=17)\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = ['username', 'first_name', 'last_name', 'phone_number']\n\n def __str__(self):\n return self.username\n\n\nclass Profile(models.Model):\n \"\"\"Profile model\"\"\"\n user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE, primary_key=True)\n description = models.TextField('user description', max_length=255)\n picture = models.ImageField(upload_to='users/pictures', blank=True,\n null=True)\n is_authenticated = models.BooleanField('user is autheticated', default=\n False)\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n",
"step-4": "<mask token>\nfrom django.conf import settings\nfrom django.db import models\nfrom django.contrib.auth.models import AbstractUser\nfrom django.core.validators import RegexValidator\n\n\nclass User(AbstractUser):\n \"\"\"User model\"\"\"\n email = models.EmailField('email address', unique=True, error_messages=\n {'unique': 'A user with that email already exists'})\n phone_regex = RegexValidator(regex='\\\\+?1?\\\\d{9,15}$', message=\n 'Phone number must be entered in the right format')\n phone_number = models.CharField(validators=[phone_regex], max_length=17)\n USERNAME_FIELD = 'email'\n REQUIRED_FIELDS = ['username', 'first_name', 'last_name', 'phone_number']\n\n def __str__(self):\n return self.username\n\n\nclass Profile(models.Model):\n \"\"\"Profile model\"\"\"\n user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.\n CASCADE, primary_key=True)\n description = models.TextField('user description', max_length=255)\n picture = models.ImageField(upload_to='users/pictures', blank=True,\n null=True)\n is_authenticated = models.BooleanField('user is autheticated', default=\n False)\n created_at = models.DateTimeField(auto_now_add=True)\n updated_at = models.DateTimeField(auto_now=True)\n\n def __str__(self):\n return self.user.username\n",
"step-5": "\"\"\"\r\nUsers model\r\n\"\"\"\r\n\r\n# Django\r\nfrom django.conf import settings\r\nfrom django.db import models\r\nfrom django.contrib.auth.models import AbstractUser\r\nfrom django.core.validators import RegexValidator\r\n\r\nclass User(AbstractUser):\r\n \"\"\"User model\"\"\"\r\n\r\n email = models.EmailField(\r\n 'email address',\r\n unique=True,\r\n error_messages={\r\n 'unique': 'A user with that email already exists'\r\n }\r\n )\r\n\r\n phone_regex = RegexValidator(\r\n regex=r'\\+?1?\\d{9,15}$',\r\n message='Phone number must be entered in the right format'\r\n )\r\n phone_number = models.CharField(\r\n validators=[phone_regex],\r\n max_length=17\r\n )\r\n\r\n USERNAME_FIELD = 'email'\r\n REQUIRED_FIELDS = ['username', 'first_name', 'last_name', 'phone_number']\r\n\r\n def __str__(self):\r\n return self.username\r\n\r\nclass Profile(models.Model):\r\n \"\"\"Profile model\"\"\"\r\n\r\n user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, primary_key=True)\r\n description = models.TextField('user description', max_length=255)\r\n picture = models.ImageField(\r\n upload_to='users/pictures',\r\n blank=True,\r\n null=True\r\n )\r\n is_authenticated = models.BooleanField('user is autheticated', default=False)\r\n\r\n created_at = models.DateTimeField(auto_now_add=True)\r\n updated_at = models.DateTimeField(auto_now=True)\r\n\r\n def __str__(self):\r\n return self.user.username\r\n \r\n",
"step-ids": [
5,
6,
8,
9,
10
]
}
|
[
5,
6,
8,
9,
10
] |
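The record above defines User and Profile but does not show how Profile rows come into existence. A common companion pattern is a post_save signal; the sketch below is an assumption about intent, not code from the record.

# Hypothetical companion to the models above: create the one-to-one
# Profile whenever a new User is saved. A conventional pattern, not
# something the original record includes.
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver

from .models import Profile


@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_profile(sender, instance, created, **kwargs):
    if created:
        Profile.objects.create(user=instance, description='')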
import pickle
import torch
data = pickle.load(open('dd0eb7901523d494d4aa324f474c782063e9e231.p', 'rb'))
torch.nn.functional.adaptive_avg_pool3d(**data)
|
normal
|
{
"blob_id": "20d09a616133295a6162a7ab1d7970ccbaf6de95",
"index": 1331,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntorch.nn.functional.adaptive_avg_pool3d(**data)\n",
"step-3": "<mask token>\ndata = pickle.load(open('dd0eb7901523d494d4aa324f474c782063e9e231.p', 'rb'))\ntorch.nn.functional.adaptive_avg_pool3d(**data)\n",
"step-4": "import pickle\nimport torch\ndata = pickle.load(open('dd0eb7901523d494d4aa324f474c782063e9e231.p', 'rb'))\ntorch.nn.functional.adaptive_avg_pool3d(**data)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
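The record above replays pickled keyword arguments into adaptive_avg_pool3d, which reads like a crash-reproduction harness. For context, here is a self-contained call with explicit shapes; the tensor sizes are illustrative assumptions, not the contents of the pickle.

# Self-contained illustration of the operator being replayed above; the
# shapes are made up, since the pickled arguments are not available here.
import torch
import torch.nn.functional as F

x = torch.randn(2, 3, 8, 8, 8)                       # (N, C, D, H, W)
y = F.adaptive_avg_pool3d(x, output_size=(4, 4, 4))
print(y.shape)                                       # torch.Size([2, 3, 4, 4, 4])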
""" OCR that converts images to text """
from pytesseract import image_to_string
from PIL import Image
print(image_to_string(Image.open('/Users/williamliu/Desktop/Screen Shot 2014-09-27 at 11.45.34 PM.png')))
#print(image_to_string(Image.open('/Users/williamliu/Desktop/Screen Shot 2014-09-27 at 11.45.34 PM.png')))
#print(image_to_string(Image.open('test-european.jpg'), lang='fra'))
|
normal
|
{
"blob_id": "91ac4a23573abcb0ab024830dbc1daebd91bd40d",
"index": 2355,
"step-1": "\"\"\" OCR that converts images to text \"\"\"\n\nfrom pytesseract import image_to_string\nfrom PIL import Image\n\nprint image_to_string(Image.open('/Users/williamliu/Desktop/Screen Shot 2014-09-27 at 11.45.34 PM.png'))\n\n#print image_to_string(Image.open('/Users/williamliu/Desktop/Screen Shot 2014-09-27 at 11.45.34 PM.png'))\n#print image_to_string(Image.open('test-european.jpg'), lang='fra')\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
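A small hedged extension of the snippet above: converting a screenshot to grayscale with Pillow before OCR often improves Tesseract's output. The preprocessing step is not part of the original record, and the file path below is a placeholder.

# Grayscale preprocessing before OCR -- an optional step the record does
# not include; the path is a placeholder.
from PIL import Image
from pytesseract import image_to_string

img = Image.open('screenshot.png').convert('L')  # 'L' = 8-bit grayscale
print(image_to_string(img))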
# -*- coding: utf-8 -*-
class Config(object):
def __init__(self):
self.config_dict = {
"data_path": {
# "vocab_path": "../data/cnews/cnews.vocab.txt",
"vocab_path": "../data/rumor/cnews.vocab.txt",
# "trainingSet_path": "../data/cnews/cnews.train.txt",
"trainingSet_path": "../data/rumor/train_list.txt",
# "valSet_path": "../data/cnews/cnews.val.txt",
"valSet_path": "../data/rumor/val_list.txt",
# "testingSet_path": "../data/cnews/cnews.test.txt",
"testingSet_path": "../data/rumor/test_list.txt"
},
"CNN_training_rule": {
"embedding_dim": 64,
"seq_length": 200,
"num_classes": 2,
"conv1_num_filters": 128,
"conv1_kernel_size": 1,
"conv2_num_filters": 128,
"conv2_kernel_size": 1,
"vocab_size": 5000,
"hidden_dim": 256,
"dropout_keep_prob": 0.5,
"learning_rate": 1e-3,
"batch_size": 64,
"epochs": 5,
"print_per_batch": 50,
"save_per_batch": 500
},
"LSTM": {
"seq_length": 300,
"num_classes": 2,
"vocab_size": 5000,
"batch_size": 64
},
"result": {
"CNN_model_path": "CNN_model.h5",
"LSTM_model_path": "LSTM_model.h5"
}
}
def get(self, section, name):
return self.config_dict[section][name]
|
normal
|
{
"blob_id": "9cb4e550a0d19b44ec8357882f353b04748b213b",
"index": 2589,
"step-1": "<mask token>\n",
"step-2": "class Config(object):\n <mask token>\n <mask token>\n",
"step-3": "class Config(object):\n <mask token>\n\n def get(self, section, name):\n return self.config_dict[section][name]\n",
"step-4": "class Config(object):\n\n def __init__(self):\n self.config_dict = {'data_path': {'vocab_path':\n '../data/rumor/cnews.vocab.txt', 'trainingSet_path':\n '../data/rumor/train_list.txt', 'valSet_path':\n '../data/rumor/val_list.txt', 'testingSet_path':\n '../data/rumor/test_list.txt'}, 'CNN_training_rule': {\n 'embedding_dim': 64, 'seq_length': 200, 'num_classes': 2,\n 'conv1_num_filters': 128, 'conv1_kernel_size': 1,\n 'conv2_num_filters': 128, 'conv2_kernel_size': 1, 'vocab_size':\n 5000, 'hidden_dim': 256, 'dropout_keep_prob': 0.5,\n 'learning_rate': 0.001, 'batch_size': 64, 'epochs': 5,\n 'print_per_batch': 50, 'save_per_batch': 500}, 'LSTM': {\n 'seq_length': 300, 'num_classes': 2, 'vocab_size': 5000,\n 'batch_size': 64}, 'result': {'CNN_model_path': 'CNN_model.h5',\n 'LSTM_model_path': 'LSTM_model.h5'}}\n\n def get(self, section, name):\n return self.config_dict[section][name]\n",
"step-5": "# -*- coding: utf-8 -*-\n\n\n\nclass Config(object):\n def __init__(self):\n self.config_dict = {\n \"data_path\": {\n # \"vocab_path\": \"../data/cnews/cnews.vocab.txt\",\n \"vocab_path\": \"../data/rumor/cnews.vocab.txt\",\n # \"trainingSet_path\": \"../data/cnews/cnews.train.txt\",\n \"trainingSet_path\": \"../data/rumor/train_list.txt\",\n # \"valSet_path\": \"../data/cnews/cnews.val.txt\",\n \"valSet_path\": \"../data/rumor/val_list.txt\",\n # \"testingSet_path\": \"../data/cnews/cnews.test.txt\",\n \"testingSet_path\": \"../data/rumor/test_list.txt\"\n },\n \"CNN_training_rule\": {\n \"embedding_dim\": 64,\n \"seq_length\": 200,\n \"num_classes\": 2,\n\n \"conv1_num_filters\": 128,\n \"conv1_kernel_size\": 1,\n\n \"conv2_num_filters\": 128,\n \"conv2_kernel_size\": 1,\n\n \"vocab_size\": 5000,\n\n \"hidden_dim\": 256,\n\n \"dropout_keep_prob\": 0.5,\n \"learning_rate\": 1e-3,\n\n \"batch_size\": 64,\n \"epochs\": 5,\n\n \"print_per_batch\": 50,\n \"save_per_batch\": 500\n },\n \"LSTM\": {\n \"seq_length\": 300,\n \"num_classes\": 2,\n \"vocab_size\": 5000,\n \"batch_size\": 64\n },\n \"result\": {\n \"CNN_model_path\": \"CNN_model.h5\",\n \"LSTM_model_path\": \"LSTM_model.h5\"\n }\n }\n\n def get(self, section, name):\n return self.config_dict[section][name]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
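For reference, a short usage sketch of the Config class above; the expected values come straight from the defaults set in __init__.

# Usage sketch; expected values are the defaults defined in __init__.
config = Config()
print(config.get('CNN_training_rule', 'batch_size'))  # 64
print(config.get('data_path', 'trainingSet_path'))    # ../data/rumor/train_list.txt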
from tkinter import ttk
from chapter04a.validated_mixin import ValidatedMixin
class RequiredEntry(ValidatedMixin, ttk.Entry):
def _focusout_validate(self, event):
valid = True
if not self.get():
valid = False
self.error.set('A value is required')
return valid
|
normal
|
{
"blob_id": "59047a113d76c64be48858258441fae5da505790",
"index": 5792,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass RequiredEntry(ValidatedMixin, ttk.Entry):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass RequiredEntry(ValidatedMixin, ttk.Entry):\n\n def _focusout_validate(self, event):\n valid = True\n if not self.get():\n valid = False\n self.error.set('A value is required')\n return valid\n",
"step-4": "from tkinter import ttk\nfrom chapter04a.validated_mixin import ValidatedMixin\n\n\nclass RequiredEntry(ValidatedMixin, ttk.Entry):\n\n def _focusout_validate(self, event):\n valid = True\n if not self.get():\n valid = False\n self.error.set('A value is required')\n return valid\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
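The snippet above imports ValidatedMixin, but the mixin itself is not in the record. So the widget can run standalone, here is a minimal stand-in sketching what the mixin plausibly provides (an error StringVar plus focusout validation wiring); this is an assumption about its shape, not the actual chapter04a implementation.

# Minimal stand-in for the imported mixin so RequiredEntry runs on its
# own. Its real implementation is not in the record; this sketch only
# assumes an `error` StringVar and focusout validation wiring.
import tkinter as tk
from tkinter import ttk


class ValidatedMixin:
    def __init__(self, *args, error_var=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.error = error_var or tk.StringVar(self)
        vcmd = self.register(self._validate)
        self.configure(validate='focusout', validatecommand=(vcmd,))

    def _validate(self):
        self.error.set('')
        return self._focusout_validate(event=None)

    def _focusout_validate(self, event):
        return True  # subclasses override with real checks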
class RetModel(object):
    def __init__(self, code=0, message="success", data=None):
self.code = code
self.msg = message
self.data = data
|
normal
|
{
"blob_id": "ec395b93cecf8431fd0df1aa0151ebd32244c367",
"index": 4941,
"step-1": "<mask token>\n",
"step-2": "class RetModel(object):\n <mask token>\n",
"step-3": "class RetModel(object):\n\n def __init__(self, code=0, message='success', data=None):\n self.code = code\n self.msg = message\n self.data = data\n",
"step-4": "\r\nclass RetModel(object):\r\n def __init__(self, code = 0, message = \"success\", data = None):\r\n self.code = code\r\n self.msg = message\r\n self.data = data\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
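RetModel looks like a standard API-response envelope. A short usage sketch follows; serialising via __dict__ is an assumption about intent, not something the record itself demonstrates.

# Usage sketch; dumping __dict__ to JSON is an assumed convention.
import json

ret = RetModel(data={'id': 42})
print(json.dumps(ret.__dict__))  # {"code": 0, "msg": "success", "data": {"id": 42}}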
"""
stanCode Breakout Project
Adapted from Eric Roberts's Breakout by
Sonja Johnson-Yu, Kylie Jue, Nick Bowman,
and Jerry Liao
YOUR DESCRIPTION HERE
"""
from campy.gui.events.timer import pause
from breakoutgraphics import BreakoutGraphics
FRAME_RATE = 1000 / 120  # pause interval in ms, i.e. 120 frames per second.
NUM_LIVES = 3
def main():
graphics = BreakoutGraphics()
    lives = NUM_LIVES  # remaining lives
    graphics.window.add(graphics.scoreboard, 0, graphics.window_height)  # scoreboard
# Add animation loop here!
while True:
pause(FRAME_RATE)
if graphics.ball_fall_down():
lives -= 1
if lives > 0:
graphics.reset_ball()
else:
graphics.game_over()
break
if graphics.you_win():
break
vx = graphics.getx()
vy = graphics.gety()
graphics.ball.move(vx, vy)
graphics.boundary()
graphics.collision()
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "b218f5e401510f844006cb6079737b54aa86827b",
"index": 2194,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height)\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nFRAME_RATE = 1000 / 120\nNUM_LIVES = 3\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height)\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nfrom campy.gui.events.timer import pause\nfrom breakoutgraphics import BreakoutGraphics\nFRAME_RATE = 1000 / 120\nNUM_LIVES = 3\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height)\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "\"\"\"\nstanCode Breakout Project\nAdapted from Eric Roberts's Breakout by\nSonja Johnson-Yu, Kylie Jue, Nick Bowman,\nand Jerry Liao\n\nYOUR DESCRIPTION HERE\n\"\"\"\n\nfrom campy.gui.events.timer import pause\nfrom breakoutgraphics import BreakoutGraphics\n\nFRAME_RATE = 1000 / 120 # 120 frames per second.\nNUM_LIVES = 3\n\n\ndef main():\n graphics = BreakoutGraphics()\n lives = NUM_LIVES # 生命\n\n graphics.window.add(graphics.scoreboard, 0, graphics.window_height) # 計分板\n\n # Add animation loop here!\n while True:\n pause(FRAME_RATE)\n if graphics.ball_fall_down():\n lives -= 1\n if lives > 0:\n graphics.reset_ball()\n else:\n graphics.game_over()\n break\n if graphics.you_win():\n break\n vx = graphics.getx()\n vy = graphics.gety()\n graphics.ball.move(vx, vy)\n graphics.boundary()\n graphics.collision()\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |